diff --git a/packages/compiler-cli/test/compliance/r3_view_compiler_i18n_spec.ts b/packages/compiler-cli/test/compliance/r3_view_compiler_i18n_spec.ts
index e9da4eb0c5..99dbc96717 100644
--- a/packages/compiler-cli/test/compliance/r3_view_compiler_i18n_spec.ts
+++ b/packages/compiler-cli/test/compliance/r3_view_compiler_i18n_spec.ts
@@ -41,7 +41,8 @@ const extract = (from: string, regex: any, transformFn: (match: any[]) => any) =
const verifyTranslationIds =
(source: string, output: string, exceptions = {},
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) => {
- const parseResult = htmlParser.parse(source, 'path:://to/template', true);
+ const parseResult =
+ htmlParser.parse(source, 'path:://to/template', {tokenizeExpansionForms: true});
const extractedIdToMsg = new Map();
const extractedIds = new Set();
const generatedIds = new Set();
diff --git a/packages/compiler-cli/test/diagnostics/mocks.ts b/packages/compiler-cli/test/diagnostics/mocks.ts
index 64046cec2c..6542b4bd5d 100644
--- a/packages/compiler-cli/test/diagnostics/mocks.ts
+++ b/packages/compiler-cli/test/diagnostics/mocks.ts
@@ -157,12 +157,7 @@ export class DiagnosticContext {
};
const urlResolver = createOfflineCompileUrlResolver();
const htmlParser = new class extends HtmlParser {
- parse(
- source: string, url: string, parseExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG):
- ParseTreeResult {
- return new ParseTreeResult([], []);
- }
+ parse(): ParseTreeResult { return new ParseTreeResult([], []); }
};
// This tracks the CompileConfig in codegen.ts. Currently these options
@@ -209,7 +204,7 @@ function compileTemplate(context: DiagnosticContext, type: StaticSymbol, templat
const parser = new TemplateParser(
config, context.reflector, expressionParser, new DomElementSchemaRegistry(), htmlParser,
null !, []);
- const htmlResult = htmlParser.parse(template, '', true);
+ const htmlResult = htmlParser.parse(template, '', {tokenizeExpansionForms: true});
const analyzedModules = context.analyzedModules;
// let errors: Diagnostic[]|undefined = undefined;
let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(type);
diff --git a/packages/compiler/src/directive_normalizer.ts b/packages/compiler/src/directive_normalizer.ts
index 06613f361f..38e54961ba 100644
--- a/packages/compiler/src/directive_normalizer.ts
+++ b/packages/compiler/src/directive_normalizer.ts
@@ -113,12 +113,11 @@ export class DirectiveNormalizer {
templateAbsUrl: string): PreparsedTemplate {
const isInline = !!prenormData.template;
const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation !);
+ const templateUrl = templateSourceUrl(
+ {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}},
+ {isInline, templateUrl: templateAbsUrl});
const rootNodesAndErrors = this._htmlParser.parse(
- template,
- templateSourceUrl(
- {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}},
- {isInline, templateUrl: templateAbsUrl}),
- true, interpolationConfig);
+ template, templateUrl, {tokenizeExpansionForms: true, interpolationConfig});
if (rootNodesAndErrors.errors.length > 0) {
const errorString = rootNodesAndErrors.errors.join('\n');
throw syntaxError(`Template parse errors:\n${errorString}`);
diff --git a/packages/compiler/src/i18n/i18n_html_parser.ts b/packages/compiler/src/i18n/i18n_html_parser.ts
index b0c3397c03..57fcceafe3 100644
--- a/packages/compiler/src/i18n/i18n_html_parser.ts
+++ b/packages/compiler/src/i18n/i18n_html_parser.ts
@@ -8,7 +8,8 @@
import {MissingTranslationStrategy} from '../core';
import {HtmlParser} from '../ml_parser/html_parser';
-import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from '../ml_parser/interpolation_config';
+import {DEFAULT_INTERPOLATION_CONFIG} from '../ml_parser/interpolation_config';
+import {TokenizeOptions} from '../ml_parser/lexer';
import {ParseTreeResult} from '../ml_parser/parser';
import {Console} from '../util';
@@ -41,11 +42,9 @@ export class I18NHtmlParser implements HtmlParser {
}
}
- parse(
- source: string, url: string, parseExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
- const parseResult =
- this._htmlParser.parse(source, url, parseExpansionForms, interpolationConfig);
+ parse(source: string, url: string, options: TokenizeOptions = {}): ParseTreeResult {
+ const interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
+ const parseResult = this._htmlParser.parse(source, url, {interpolationConfig, ...options});
if (parseResult.errors.length) {
return new ParseTreeResult(parseResult.rootNodes, parseResult.errors);
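
For reference: the new `parse()` signature here defaults the interpolation config and then spreads the caller's options over it, so an explicitly supplied `interpolationConfig` still wins. A minimal sketch of that merge in isolation (the option values are hypothetical, and it assumes `InterpolationConfig` and `DEFAULT_INTERPOLATION_CONFIG` are reachable from the package entry point):

```ts
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from '@angular/compiler';

// Hypothetical caller-supplied options, as passed to I18NHtmlParser.parse().
const options = {
  tokenizeExpansionForms: true,
  interpolationConfig: InterpolationConfig.fromArray(['{%', '%}']),
};

// Same pattern as the patched method: default first, then spread the caller's
// options so their interpolationConfig (when present) overrides the default.
const interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
const merged = {interpolationConfig, ...options};

console.log(merged.interpolationConfig.start);  // '{%' (the caller's config wins)
```
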
diff --git a/packages/compiler/src/i18n/message_bundle.ts b/packages/compiler/src/i18n/message_bundle.ts
index e685478367..882ac54eb3 100644
--- a/packages/compiler/src/i18n/message_bundle.ts
+++ b/packages/compiler/src/i18n/message_bundle.ts
@@ -27,7 +27,8 @@ export class MessageBundle {
updateFromTemplate(html: string, url: string, interpolationConfig: InterpolationConfig):
ParseError[] {
- const htmlParserResult = this._htmlParser.parse(html, url, true, interpolationConfig);
+ const htmlParserResult =
+ this._htmlParser.parse(html, url, {tokenizeExpansionForms: true, interpolationConfig});
if (htmlParserResult.errors.length) {
return htmlParserResult.errors;
diff --git a/packages/compiler/src/i18n/serializers/xliff.ts b/packages/compiler/src/i18n/serializers/xliff.ts
index e1efddd9d5..58d9a6083c 100644
--- a/packages/compiler/src/i18n/serializers/xliff.ts
+++ b/packages/compiler/src/i18n/serializers/xliff.ts
@@ -185,7 +185,7 @@ class XliffParser implements ml.Visitor {
this._unitMlString = null;
this._msgIdToHtml = {};
- const xml = new XmlParser().parse(xliff, url, false);
+ const xml = new XmlParser().parse(xliff, url);
this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes, null);
@@ -268,7 +268,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[];
convert(message: string, url: string) {
- const xmlIcu = new XmlParser().parse(message, url, true);
+ const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?
diff --git a/packages/compiler/src/i18n/serializers/xliff2.ts b/packages/compiler/src/i18n/serializers/xliff2.ts
index f43382349f..a8ba85d597 100644
--- a/packages/compiler/src/i18n/serializers/xliff2.ts
+++ b/packages/compiler/src/i18n/serializers/xliff2.ts
@@ -203,7 +203,7 @@ class Xliff2Parser implements ml.Visitor {
this._unitMlString = null;
this._msgIdToHtml = {};
- const xml = new XmlParser().parse(xliff, url, false);
+ const xml = new XmlParser().parse(xliff, url);
this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes, null);
@@ -293,7 +293,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[];
convert(message: string, url: string) {
- const xmlIcu = new XmlParser().parse(message, url, true);
+ const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?
diff --git a/packages/compiler/src/i18n/serializers/xtb.ts b/packages/compiler/src/i18n/serializers/xtb.ts
index c544bc9d70..441c03fa04 100644
--- a/packages/compiler/src/i18n/serializers/xtb.ts
+++ b/packages/compiler/src/i18n/serializers/xtb.ts
@@ -88,7 +88,7 @@ class XtbParser implements ml.Visitor {
// We can not parse the ICU messages at this point as some messages might not originate
// from Angular that could not be lex'd.
- const xml = new XmlParser().parse(xtb, url, false);
+ const xml = new XmlParser().parse(xtb, url);
this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes);
@@ -159,7 +159,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[];
convert(message: string, url: string) {
- const xmlIcu = new XmlParser().parse(message, url, true);
+ const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?
diff --git a/packages/compiler/src/i18n/translation_bundle.ts b/packages/compiler/src/i18n/translation_bundle.ts
index cb6fb4f01c..30da42a421 100644
--- a/packages/compiler/src/i18n/translation_bundle.ts
+++ b/packages/compiler/src/i18n/translation_bundle.ts
@@ -83,7 +83,7 @@ class I18nToHtmlVisitor implements i18n.Visitor {
// text to html
const url = srcMsg.nodes[0].sourceSpan.start.file.url;
- const html = new HtmlParser().parse(text, url, true);
+ const html = new HtmlParser().parse(text, url, {tokenizeExpansionForms: true});
return {
nodes: html.rootNodes,
diff --git a/packages/compiler/src/jit_compiler_facade.ts b/packages/compiler/src/jit_compiler_facade.ts
index d7470f4baa..88c6e5c1e7 100644
--- a/packages/compiler/src/jit_compiler_facade.ts
+++ b/packages/compiler/src/jit_compiler_facade.ts
@@ -112,7 +112,7 @@ export class CompilerFacadeImpl implements CompilerFacade {
// Parse the template and check for errors.
const template = parseTemplate(
facade.template, sourceMapUrl,
- {preserveWhitespaces: facade.preserveWhitespaces || false, interpolationConfig});
+ {preserveWhitespaces: facade.preserveWhitespaces, interpolationConfig});
if (template.errors !== undefined) {
const errors = template.errors.map(err => err.toString()).join(', ');
throw new Error(`Errors during JIT compilation of template for ${facade.name}: ${errors}`);
diff --git a/packages/compiler/src/ml_parser/html_parser.ts b/packages/compiler/src/ml_parser/html_parser.ts
index 60702f25b0..5e788523db 100644
--- a/packages/compiler/src/ml_parser/html_parser.ts
+++ b/packages/compiler/src/ml_parser/html_parser.ts
@@ -7,7 +7,7 @@
*/
import {getHtmlTagDefinition} from './html_tags';
-import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
+import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser';
export {ParseTreeResult, TreeError} from './parser';
@@ -15,9 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class HtmlParser extends Parser {
constructor() { super(getHtmlTagDefinition); }
- parse(
- source: string, url: string, parseExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
- return super.parse(source, url, parseExpansionForms, interpolationConfig);
+ parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
+ return super.parse(source, url, options);
}
}
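
For reference, a sketch of what migrated `HtmlParser` call sites look like after this change (assuming `HtmlParser` is re-exported from the package entry point; the specs in this patch import it via relative paths):

```ts
import {HtmlParser} from '@angular/compiler';

const parser = new HtmlParser();

// Before this patch: positional arguments.
//   parser.parse(source, url, /* parseExpansionForms */ true, interpolationConfig);

// After this patch: a single optional options object; omitted fields keep their defaults.
const result = parser.parse(
    '{count, plural, =0 {none} other {some}}', 'sketch://template.html',
    {tokenizeExpansionForms: true});

console.log(result.errors.length);     // 0 for a well-formed ICU message
console.log(result.rootNodes.length);  // 1 (a single expansion node)
```
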
diff --git a/packages/compiler/src/ml_parser/lexer.ts b/packages/compiler/src/ml_parser/lexer.ts
index b4f45d47ab..3df7c5e986 100644
--- a/packages/compiler/src/ml_parser/lexer.ts
+++ b/packages/compiler/src/ml_parser/lexer.ts
@@ -49,14 +49,20 @@ export class TokenizeResult {
constructor(public tokens: Token[], public errors: TokenError[]) {}
}
+/**
+ * Options that modify how the text is tokenized.
+ */
+export interface TokenizeOptions {
+ /** Whether to tokenize ICU messages (considered as text nodes when false). */
+ tokenizeExpansionForms?: boolean;
+ /** How to tokenize interpolation markers. */
+ interpolationConfig?: InterpolationConfig;
+}
+
export function tokenize(
source: string, url: string, getTagDefinition: (tagName: string) => TagDefinition,
- tokenizeExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): TokenizeResult {
- return new _Tokenizer(
- new ParseSourceFile(source, url), getTagDefinition, tokenizeExpansionForms,
- interpolationConfig)
- .tokenize();
+ options: TokenizeOptions = {}): TokenizeResult {
+ return new _Tokenizer(new ParseSourceFile(source, url), getTagDefinition, options).tokenize();
}
const _CR_OR_CRLF_REGEXP = /\r\n?/g;
@@ -78,6 +84,8 @@ class _ControlFlowError {
class _Tokenizer {
private _input: string;
private _length: number;
+ private _tokenizeIcu: boolean;
+ private _interpolationConfig: InterpolationConfig;
// Note: this is always lowercase!
private _peek: number = -1;
private _nextPeek: number = -1;
@@ -102,8 +110,9 @@ class _Tokenizer {
*/
constructor(
private _file: ParseSourceFile, private _getTagDefinition: (tagName: string) => TagDefinition,
- private _tokenizeIcu: boolean,
- private _interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) {
+ options: TokenizeOptions) {
+ this._tokenizeIcu = options.tokenizeExpansionForms || false;
+ this._interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
this._input = _file.content;
this._length = _file.content.length;
this._advance();
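
For reference, a sketch of the reworked `tokenize()` entry point. The deep import paths are an assumption made for the sketch; inside the repository the specs use relative imports such as `../../src/ml_parser/lexer`:

```ts
import {tokenize, TokenizeOptions} from '@angular/compiler/src/ml_parser/lexer';
import {getHtmlTagDefinition} from '@angular/compiler/src/ml_parser/html_tags';

const options: TokenizeOptions = {tokenizeExpansionForms: true};

// With the flag set, the ICU message below is lexed into EXPANSION_* tokens;
// with it unset (or false) the same input comes back as a single TEXT token.
const result = tokenize('{n, plural, =0 {none}}', 'sketch://url', getHtmlTagDefinition, options);

console.log(result.tokens.map(token => token.type));
console.log(result.errors.length);  // 0
```
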
diff --git a/packages/compiler/src/ml_parser/parser.ts b/packages/compiler/src/ml_parser/parser.ts
index 3a06bc182d..b74e9d00a2 100644
--- a/packages/compiler/src/ml_parser/parser.ts
+++ b/packages/compiler/src/ml_parser/parser.ts
@@ -9,7 +9,6 @@
import {ParseError, ParseSourceSpan} from '../parse_util';
import * as html from './ast';
-import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
import * as lex from './lexer';
import {TagDefinition, getNsPrefix, isNgContainer, mergeNsAndName} from './tags';
@@ -30,11 +29,8 @@ export class ParseTreeResult {
export class Parser {
constructor(public getTagDefinition: (tagName: string) => TagDefinition) {}
- parse(
- source: string, url: string, parseExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
- const tokensAndErrors =
- lex.tokenize(source, url, this.getTagDefinition, parseExpansionForms, interpolationConfig);
+ parse(source: string, url: string, options?: lex.TokenizeOptions): ParseTreeResult {
+ const tokensAndErrors = lex.tokenize(source, url, this.getTagDefinition, options);
const treeAndErrors = new _TreeBuilder(tokensAndErrors.tokens, this.getTagDefinition).build();
diff --git a/packages/compiler/src/ml_parser/xml_parser.ts b/packages/compiler/src/ml_parser/xml_parser.ts
index 57f49eb105..dc6dffb042 100644
--- a/packages/compiler/src/ml_parser/xml_parser.ts
+++ b/packages/compiler/src/ml_parser/xml_parser.ts
@@ -6,6 +6,7 @@
* found in the LICENSE file at https://angular.io/license
*/
+import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser';
import {getXmlTagDefinition} from './xml_tags';
@@ -14,7 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class XmlParser extends Parser {
constructor() { super(getXmlTagDefinition); }
- parse(source: string, url: string, parseExpansionForms: boolean = false): ParseTreeResult {
- return super.parse(source, url, parseExpansionForms);
+ parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
+ return super.parse(source, url, options);
}
}
diff --git a/packages/compiler/src/render3/view/template.ts b/packages/compiler/src/render3/view/template.ts
index 3563b8718f..a86532ee4b 100644
--- a/packages/compiler/src/render3/view/template.ts
+++ b/packages/compiler/src/render3/view/template.ts
@@ -1562,20 +1562,35 @@ function interpolate(args: o.Expression[]): o.Expression {
return o.importExpr(R3.interpolationV).callFn([o.literalArr(args)]);
}
+/**
+ * Options that can be used to modify how a template is parsed by `parseTemplate()`.
+ */
+export interface ParseTemplateOptions {
+ /**
+ * Include whitespace nodes in the parsed output.
+ */
+ preserveWhitespaces?: boolean;
+ /**
+ * How to parse interpolation markers.
+ */
+ interpolationConfig?: InterpolationConfig;
+}
+
/**
* Parse a template into render3 `Node`s and additional metadata, with no other dependencies.
*
* @param template text of the template to parse
* @param templateUrl URL to use for source mapping of the parsed template
+ * @param options options to modify how the template is parsed
*/
export function parseTemplate(
template: string, templateUrl: string,
- options: {preserveWhitespaces?: boolean, interpolationConfig?: InterpolationConfig} = {}):
- {errors?: ParseError[], nodes: t.Node[]} {
+ options: ParseTemplateOptions = {}): {errors?: ParseError[], nodes: t.Node[]} {
const {interpolationConfig, preserveWhitespaces} = options;
const bindingParser = makeBindingParser(interpolationConfig);
const htmlParser = new HtmlParser();
- const parseResult = htmlParser.parse(template, templateUrl, true, interpolationConfig);
+ const parseResult =
+ htmlParser.parse(template, templateUrl, {...options, tokenizeExpansionForms: true});
if (parseResult.errors && parseResult.errors.length > 0) {
return {errors: parseResult.errors, nodes: []};
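
For reference, a sketch of calling `parseTemplate()` with the newly named `ParseTemplateOptions`. Whether these symbols are re-exported from the package entry point is an assumption; in this tree they live in `packages/compiler/src/render3/view/template.ts`. Note that `parseTemplate()` spreads its options into the `HtmlParser.parse()` call, so a supplied `interpolationConfig` flows through to the lexer:

```ts
import {InterpolationConfig, parseTemplate, ParseTemplateOptions} from '@angular/compiler';

const options: ParseTemplateOptions = {
  preserveWhitespaces: false,
  interpolationConfig: new InterpolationConfig('{{', '}}'),
};

const {errors, nodes} = parseTemplate('<span>{{greeting}}</span>', 'sketch://cmp.html', options);

if (errors !== undefined) {
  // Mirrors the error handling in jit_compiler_facade.ts above.
  throw new Error(errors.map(err => err.toString()).join(', '));
}
console.log(nodes.length);  // one root element node
```
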
diff --git a/packages/compiler/src/template_parser/template_parser.ts b/packages/compiler/src/template_parser/template_parser.ts
index 0b71697782..2a6a866db7 100644
--- a/packages/compiler/src/template_parser/template_parser.ts
+++ b/packages/compiler/src/template_parser/template_parser.ts
@@ -114,8 +114,10 @@ export class TemplateParser {
directives: CompileDirectiveSummary[], pipes: CompilePipeSummary[], schemas: SchemaMetadata[],
templateUrl: string, preserveWhitespaces: boolean): TemplateParseResult {
let htmlParseResult = typeof template === 'string' ?
- this._htmlParser !.parse(
- template, templateUrl, true, this.getInterpolationConfig(component)) :
+ this._htmlParser !.parse(template, templateUrl, {
+ tokenizeExpansionForms: true,
+ interpolationConfig: this.getInterpolationConfig(component)
+ }) :
template;
if (!preserveWhitespaces) {
diff --git a/packages/compiler/test/i18n/extractor_merger_spec.ts b/packages/compiler/test/i18n/extractor_merger_spec.ts
index 7d8f489436..697d8fedae 100644
--- a/packages/compiler/test/i18n/extractor_merger_spec.ts
+++ b/packages/compiler/test/i18n/extractor_merger_spec.ts
@@ -501,7 +501,7 @@ import {serializeNodes as serializeHtmlNodes} from '../ml_parser/util/util';
function parseHtml(html: string): html.Node[] {
const htmlParser = new HtmlParser();
- const parseResult = htmlParser.parse(html, 'extractor spec', true);
+ const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 1) {
throw new Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
}
diff --git a/packages/compiler/test/i18n/i18n_parser_spec.ts b/packages/compiler/test/i18n/i18n_parser_spec.ts
index 4b9994ee27..b71dc0a22f 100644
--- a/packages/compiler/test/i18n/i18n_parser_spec.ts
+++ b/packages/compiler/test/i18n/i18n_parser_spec.ts
@@ -330,7 +330,7 @@ export function _extractMessages(
html: string, implicitTags: string[] = [],
implicitAttrs: {[k: string]: string[]} = {}): Message[] {
const htmlParser = new HtmlParser();
- const parseResult = htmlParser.parse(html, 'extractor spec', true);
+ const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 1) {
throw Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
}
diff --git a/packages/compiler/test/ml_parser/ast_serializer_spec.ts b/packages/compiler/test/ml_parser/ast_serializer_spec.ts
index b880e37909..055b0a7a1c 100644
--- a/packages/compiler/test/ml_parser/ast_serializer_spec.ts
+++ b/packages/compiler/test/ml_parser/ast_serializer_spec.ts
@@ -35,13 +35,13 @@ import {serializeNodes} from './util/util';
it('should support expansion', () => {
const html = '{number, plural, =0 {none} =1 {one} other {many}}';
- const ast = parser.parse(html, 'url', true);
+ const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]);
});
it('should support comment', () => {
      const html = '<!--comment-->';
- const ast = parser.parse(html, 'url', true);
+ const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]);
});
@@ -51,9 +51,9 @@ import {serializeNodes} from './util/util';
{number, plural, =0 {{sex, select, other {?}}}}
-
+
`;
- const ast = parser.parse(html, 'url', true);
+ const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]);
});
});
diff --git a/packages/compiler/test/ml_parser/html_parser_spec.ts b/packages/compiler/test/ml_parser/html_parser_spec.ts
index 23bbc0168c..38d07fa62a 100644
--- a/packages/compiler/test/ml_parser/html_parser_spec.ts
+++ b/packages/compiler/test/ml_parser/html_parser_spec.ts
@@ -300,7 +300,7 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
it('should parse out expansion forms', () => {
const parsed = parser.parse(
           `<div>before{messages.length, plural, =0 {You have no messages} =1 {One {{message}}}}after</div>`,
- 'TestComp', true);
+ 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([
[html.Element, 'div', 0],
@@ -324,8 +324,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
});
it('should parse out expansion forms', () => {
- const parsed =
-          parser.parse(`<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp', true);
+ const parsed = parser.parse(
+          `<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp',
+ {tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([
[html.Element, 'div', 0],
@@ -337,7 +338,8 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
it('should parse out nested expansion forms', () => {
const parsed = parser.parse(
- `{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp', true);
+ `{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp',
+ {tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([
[html.Expansion, 'messages.length', 'plural', 0],
[html.ExpansionCase, '=0', 1],
@@ -353,26 +355,31 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
});
it('should error when expansion form is not closed', () => {
- const p = parser.parse(`{messages.length, plural, =0 {one}`, 'TestComp', true);
+ const p = parser.parse(
+ `{messages.length, plural, =0 {one}`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([
[null, 'Invalid ICU message. Missing \'}\'.', '0:34']
]);
});
it('should support ICU expressions with cases that contain numbers', () => {
- const p = parser.parse(`{sex, select, male {m} female {f} 0 {other}}`, 'TestComp', true);
+ const p = parser.parse(
+ `{sex, select, male {m} female {f} 0 {other}}`, 'TestComp',
+ {tokenizeExpansionForms: true});
expect(p.errors.length).toEqual(0);
});
it('should error when expansion case is not closed', () => {
- const p = parser.parse(`{messages.length, plural, =0 {one`, 'TestComp', true);
+ const p = parser.parse(
+ `{messages.length, plural, =0 {one`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([
[null, 'Invalid ICU message. Missing \'}\'.', '0:29']
]);
});
it('should error when invalid html in the case', () => {
-      const p = parser.parse(`{messages.length, plural, =0 {<b/>}`, 'TestComp', true);
+      const p = parser.parse(
+          `{messages.length, plural, =0 {<b/>}`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([
['b', 'Only void and foreign elements can be self closed "b"', '0:30']
]);
@@ -404,8 +411,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
});
it('should support expansion form', () => {
- expect(humanizeDomSourceSpans(
-          parser.parse('<div>{count, plural, =0 {msg}}</div>', 'TestComp', true)))
+ expect(humanizeDomSourceSpans(parser.parse(
+          '<div>{count, plural, =0 {msg}}</div>', 'TestComp',
+ {tokenizeExpansionForms: true})))
.toEqual([
            [html.Element, 'div', 0, '<div>'],
[html.Expansion, 'count', 'plural', 1, '{count, plural, =0 {msg}}'],
diff --git a/packages/compiler/test/ml_parser/icu_ast_expander_spec.ts b/packages/compiler/test/ml_parser/icu_ast_expander_spec.ts
index e74e1a4d25..0682030852 100644
--- a/packages/compiler/test/ml_parser/icu_ast_expander_spec.ts
+++ b/packages/compiler/test/ml_parser/icu_ast_expander_spec.ts
@@ -17,7 +17,7 @@ import {humanizeNodes} from './ast_spec_utils';
describe('Expander', () => {
function expand(template: string): ExpansionResult {
const htmlParser = new HtmlParser();
- const res = htmlParser.parse(template, 'url', true);
+ const res = htmlParser.parse(template, 'url', {tokenizeExpansionForms: true});
return expandNodes(res.rootNodes);
}
diff --git a/packages/compiler/test/ml_parser/lexer_spec.ts b/packages/compiler/test/ml_parser/lexer_spec.ts
index f2fe62a1d5..861b1508d1 100644
--- a/packages/compiler/test/ml_parser/lexer_spec.ts
+++ b/packages/compiler/test/ml_parser/lexer_spec.ts
@@ -443,10 +443,11 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
});
it('should parse interpolation with custom markers', () => {
- expect(tokenizeAndHumanizeParts('{% a %}', null !, {start: '{%', end: '%}'})).toEqual([
- [lex.TokenType.TEXT, '{% a %}'],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts('{% a %}', {interpolationConfig: {start: '{%', end: '%}'}}))
+ .toEqual([
+ [lex.TokenType.TEXT, '{% a %}'],
+ [lex.TokenType.EOF],
+ ]);
});
it('should handle CR & LF', () => {
@@ -524,13 +525,15 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
});
it('should treat expansion form as text when they are not parsed', () => {
- expect(tokenizeAndHumanizeParts('
{a, b, =4 {c}}', false)).toEqual([
- [lex.TokenType.TAG_OPEN_START, null, 'span'],
- [lex.TokenType.TAG_OPEN_END],
- [lex.TokenType.TEXT, '{a, b, =4 {c}}'],
- [lex.TokenType.TAG_CLOSE, null, 'span'],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts(
+                 '<span>{a, b, =4 {c}}</span>', {tokenizeExpansionForms: false}))
+ .toEqual([
+ [lex.TokenType.TAG_OPEN_START, null, 'span'],
+ [lex.TokenType.TAG_OPEN_END],
+ [lex.TokenType.TEXT, '{a, b, =4 {c}}'],
+ [lex.TokenType.TAG_CLOSE, null, 'span'],
+ [lex.TokenType.EOF],
+ ]);
});
});
@@ -641,7 +644,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
describe('expansion forms', () => {
it('should parse an expansion form', () => {
- expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true))
+ expect(
+ tokenizeAndHumanizeParts(
+ '{one.two, three, =4 {four} =5 {five} foo {bar} }', {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'],
@@ -664,75 +669,84 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
});
it('should parse an expansion form with text elements surrounding it', () => {
- expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([
- [lex.TokenType.TEXT, 'before'],
- [lex.TokenType.EXPANSION_FORM_START],
- [lex.TokenType.RAW_TEXT, 'one.two'],
- [lex.TokenType.RAW_TEXT, 'three'],
- [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
- [lex.TokenType.EXPANSION_CASE_EXP_START],
- [lex.TokenType.TEXT, 'four'],
- [lex.TokenType.EXPANSION_CASE_EXP_END],
- [lex.TokenType.EXPANSION_FORM_END],
- [lex.TokenType.TEXT, 'after'],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts(
+ 'before{one.two, three, =4 {four}}after', {tokenizeExpansionForms: true}))
+ .toEqual([
+ [lex.TokenType.TEXT, 'before'],
+ [lex.TokenType.EXPANSION_FORM_START],
+ [lex.TokenType.RAW_TEXT, 'one.two'],
+ [lex.TokenType.RAW_TEXT, 'three'],
+ [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+ [lex.TokenType.EXPANSION_CASE_EXP_START],
+ [lex.TokenType.TEXT, 'four'],
+ [lex.TokenType.EXPANSION_CASE_EXP_END],
+ [lex.TokenType.EXPANSION_FORM_END],
+ [lex.TokenType.TEXT, 'after'],
+ [lex.TokenType.EOF],
+ ]);
});
it('should parse an expansion form as a tag single child', () => {
-      expect(tokenizeAndHumanizeParts('<div><span>{a, b, =4 {c}}</span></div>', true)).toEqual([
- [lex.TokenType.TAG_OPEN_START, null, 'div'],
- [lex.TokenType.TAG_OPEN_END],
- [lex.TokenType.TAG_OPEN_START, null, 'span'],
- [lex.TokenType.TAG_OPEN_END],
- [lex.TokenType.EXPANSION_FORM_START],
- [lex.TokenType.RAW_TEXT, 'a'],
- [lex.TokenType.RAW_TEXT, 'b'],
- [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
- [lex.TokenType.EXPANSION_CASE_EXP_START],
- [lex.TokenType.TEXT, 'c'],
- [lex.TokenType.EXPANSION_CASE_EXP_END],
- [lex.TokenType.EXPANSION_FORM_END],
- [lex.TokenType.TAG_CLOSE, null, 'span'],
- [lex.TokenType.TAG_CLOSE, null, 'div'],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts(
+                 '<div><span>{a, b, =4 {c}}</span></div>', {tokenizeExpansionForms: true}))
+ .toEqual([
+ [lex.TokenType.TAG_OPEN_START, null, 'div'],
+ [lex.TokenType.TAG_OPEN_END],
+ [lex.TokenType.TAG_OPEN_START, null, 'span'],
+ [lex.TokenType.TAG_OPEN_END],
+ [lex.TokenType.EXPANSION_FORM_START],
+ [lex.TokenType.RAW_TEXT, 'a'],
+ [lex.TokenType.RAW_TEXT, 'b'],
+ [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+ [lex.TokenType.EXPANSION_CASE_EXP_START],
+ [lex.TokenType.TEXT, 'c'],
+ [lex.TokenType.EXPANSION_CASE_EXP_END],
+ [lex.TokenType.EXPANSION_FORM_END],
+ [lex.TokenType.TAG_CLOSE, null, 'span'],
+ [lex.TokenType.TAG_CLOSE, null, 'div'],
+ [lex.TokenType.EOF],
+ ]);
});
it('should parse an expansion forms with elements in it', () => {
-      expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four <b>a</b>}}', true)).toEqual([
- [lex.TokenType.EXPANSION_FORM_START],
- [lex.TokenType.RAW_TEXT, 'one.two'],
- [lex.TokenType.RAW_TEXT, 'three'],
- [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
- [lex.TokenType.EXPANSION_CASE_EXP_START],
- [lex.TokenType.TEXT, 'four '],
- [lex.TokenType.TAG_OPEN_START, null, 'b'],
- [lex.TokenType.TAG_OPEN_END],
- [lex.TokenType.TEXT, 'a'],
- [lex.TokenType.TAG_CLOSE, null, 'b'],
- [lex.TokenType.EXPANSION_CASE_EXP_END],
- [lex.TokenType.EXPANSION_FORM_END],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts(
+                 '{one.two, three, =4 {four <b>a</b>}}', {tokenizeExpansionForms: true}))
+ .toEqual([
+ [lex.TokenType.EXPANSION_FORM_START],
+ [lex.TokenType.RAW_TEXT, 'one.two'],
+ [lex.TokenType.RAW_TEXT, 'three'],
+ [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+ [lex.TokenType.EXPANSION_CASE_EXP_START],
+ [lex.TokenType.TEXT, 'four '],
+ [lex.TokenType.TAG_OPEN_START, null, 'b'],
+ [lex.TokenType.TAG_OPEN_END],
+ [lex.TokenType.TEXT, 'a'],
+ [lex.TokenType.TAG_CLOSE, null, 'b'],
+ [lex.TokenType.EXPANSION_CASE_EXP_END],
+ [lex.TokenType.EXPANSION_FORM_END],
+ [lex.TokenType.EOF],
+ ]);
});
it('should parse an expansion forms containing an interpolation', () => {
- expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([
- [lex.TokenType.EXPANSION_FORM_START],
- [lex.TokenType.RAW_TEXT, 'one.two'],
- [lex.TokenType.RAW_TEXT, 'three'],
- [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
- [lex.TokenType.EXPANSION_CASE_EXP_START],
- [lex.TokenType.TEXT, 'four {{a}}'],
- [lex.TokenType.EXPANSION_CASE_EXP_END],
- [lex.TokenType.EXPANSION_FORM_END],
- [lex.TokenType.EOF],
- ]);
+ expect(tokenizeAndHumanizeParts(
+ '{one.two, three, =4 {four {{a}}}}', {tokenizeExpansionForms: true}))
+ .toEqual([
+ [lex.TokenType.EXPANSION_FORM_START],
+ [lex.TokenType.RAW_TEXT, 'one.two'],
+ [lex.TokenType.RAW_TEXT, 'three'],
+ [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+ [lex.TokenType.EXPANSION_CASE_EXP_START],
+ [lex.TokenType.TEXT, 'four {{a}}'],
+ [lex.TokenType.EXPANSION_CASE_EXP_END],
+ [lex.TokenType.EXPANSION_FORM_END],
+ [lex.TokenType.EOF],
+ ]);
});
it('should parse nested expansion forms', () => {
- expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true))
+ expect(tokenizeAndHumanizeParts(
+ `{one.two, three, =4 { {xx, yy, =x {one}} }}`, {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'],
@@ -757,11 +771,12 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
describe('errors', () => {
it('should report unescaped "{" on error', () => {
-      expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, true)).toEqual([[
- lex.TokenType.RAW_TEXT,
- `Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`,
- '0:21',
- ]]);
+      expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, {tokenizeExpansionForms: true}))
+ .toEqual([[
+ lex.TokenType.RAW_TEXT,
+ `Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`,
+ '0:21',
+ ]]);
});
it('should include 2 lines of context in message', () => {
@@ -790,11 +805,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
});
}
-function tokenizeWithoutErrors(
- input: string, tokenizeExpansionForms: boolean = false,
- interpolationConfig?: InterpolationConfig): lex.Token[] {
- const tokenizeResult = lex.tokenize(
- input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms, interpolationConfig);
+function tokenizeWithoutErrors(input: string, options?: lex.TokenizeOptions): lex.Token[] {
+ const tokenizeResult = lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options);
if (tokenizeResult.errors.length > 0) {
const errorString = tokenizeResult.errors.join('\n');
@@ -804,27 +816,25 @@ function tokenizeWithoutErrors(
return tokenizeResult.tokens;
}
-function tokenizeAndHumanizeParts(
- input: string, tokenizeExpansionForms: boolean = false,
- interpolationConfig?: InterpolationConfig): any[] {
- return tokenizeWithoutErrors(input, tokenizeExpansionForms, interpolationConfig)
-      .map(token => [<any>token.type].concat(token.parts));
+function tokenizeAndHumanizeParts(input: string, options?: lex.TokenizeOptions): any[] {
+ return tokenizeWithoutErrors(input, options).map(token => [token.type].concat(token.parts));
}
-function tokenizeAndHumanizeSourceSpans(input: string): any[] {
- return tokenizeWithoutErrors(input).map(token => [token.type, token.sourceSpan.toString()]);
+function tokenizeAndHumanizeSourceSpans(input: string, options?: lex.TokenizeOptions): any[] {
+ return tokenizeWithoutErrors(input, options)
+ .map(token => [token.type, token.sourceSpan.toString()]);
}
function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`;
}
-function tokenizeAndHumanizeLineColumn(input: string): any[] {
- return tokenizeWithoutErrors(input).map(
- token => [token.type, humanizeLineColumn(token.sourceSpan.start)]);
+function tokenizeAndHumanizeLineColumn(input: string, options?: lex.TokenizeOptions): any[] {
+ return tokenizeWithoutErrors(input, options)
+ .map(token => [token.type, humanizeLineColumn(token.sourceSpan.start)]);
}
-function tokenizeAndHumanizeErrors(input: string, tokenizeExpansionForms: boolean = false): any[] {
- return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms)
+function tokenizeAndHumanizeErrors(input: string, options?: lex.TokenizeOptions): any[] {
+ return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options)
.errors.map(e => [e.tokenType, e.msg, humanizeLineColumn(e.span.start)]);
}
diff --git a/packages/compiler/test/render3/view/util.ts b/packages/compiler/test/render3/view/util.ts
index 26f584ea23..a347df7ace 100644
--- a/packages/compiler/test/render3/view/util.ts
+++ b/packages/compiler/test/render3/view/util.ts
@@ -81,7 +81,8 @@ export function parseR3(
input: string, options: {preserveWhitespaces?: boolean} = {}): Render3ParseResult {
const htmlParser = new HtmlParser();
- const parseResult = htmlParser.parse(input, 'path:://to/template', true);
+ const parseResult =
+ htmlParser.parse(input, 'path:://to/template', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 0) {
const msg = parseResult.errors.map(e => e.toString()).join('\n');
diff --git a/packages/language-service/src/language_service.ts b/packages/language-service/src/language_service.ts
index d3e48a0c12..c76a3039ed 100644
--- a/packages/language-service/src/language_service.ts
+++ b/packages/language-service/src/language_service.ts
@@ -112,7 +112,7 @@ class LanguageServiceImpl implements LanguageService {
const parser = new TemplateParser(
config, this.host.resolver.getReflector(), expressionParser,
new DomElementSchemaRegistry(), htmlParser, null !, []);
- const htmlResult = htmlParser.parse(template.source, '', true);
+ const htmlResult = htmlParser.parse(template.source, '', {tokenizeExpansionForms: true});
const analyzedModules = this.host.getAnalyzedModules();
let errors: Diagnostic[]|undefined = undefined;
let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(template.type);
diff --git a/packages/language-service/src/typescript_host.ts b/packages/language-service/src/typescript_host.ts
index a9cad8380e..ec1d3c7f87 100644
--- a/packages/language-service/src/typescript_host.ts
+++ b/packages/language-service/src/typescript_host.ts
@@ -38,11 +38,7 @@ export function createLanguageServiceFromTypescript(
* syntactically incorrect templates.
*/
export class DummyHtmlParser extends HtmlParser {
- parse(
- source: string, url: string, parseExpansionForms: boolean = false,
- interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
- return new ParseTreeResult([], []);
- }
+ parse(): ParseTreeResult { return new ParseTreeResult([], []); }
}
/**