refactor(compiler): use `options` argument for parsers (#28055)

This commit consolidates, into a single `options` hash, the
options that can modify the parsing of text (e.g. HTML,
Angular templates, CSS, i18n) into an AST for further
processing.

This makes the code cleaner and more readable, but also
enables us to support further options for parsing without
triggering wide-ranging changes to code that should not
be affected by these new options. Specifically, it will let
us pass information about the placement of a template
within its containing file, which is essential
for accurate SourceMap processing.

PR Close #28055
This commit is contained in:
Pete Bacon Darwin 2019-02-08 22:10:19 +00:00 committed by Misko Hevery
parent 81df5dcfc0
commit 673ac2945c
25 changed files with 200 additions and 169 deletions

View File

@ -41,7 +41,8 @@ const extract = (from: string, regex: any, transformFn: (match: any[]) => any) =
const verifyTranslationIds = const verifyTranslationIds =
(source: string, output: string, exceptions = {}, (source: string, output: string, exceptions = {},
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) => { interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) => {
const parseResult = htmlParser.parse(source, 'path:://to/template', true); const parseResult =
htmlParser.parse(source, 'path:://to/template', {tokenizeExpansionForms: true});
const extractedIdToMsg = new Map<string, any>(); const extractedIdToMsg = new Map<string, any>();
const extractedIds = new Set<string>(); const extractedIds = new Set<string>();
const generatedIds = new Set<string>(); const generatedIds = new Set<string>();

View File

@ -157,12 +157,7 @@ export class DiagnosticContext {
}; };
const urlResolver = createOfflineCompileUrlResolver(); const urlResolver = createOfflineCompileUrlResolver();
const htmlParser = new class extends HtmlParser { const htmlParser = new class extends HtmlParser {
parse( parse(): ParseTreeResult { return new ParseTreeResult([], []); }
source: string, url: string, parseExpansionForms: boolean = false,
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG):
ParseTreeResult {
return new ParseTreeResult([], []);
}
}; };
// This tracks the CompileConfig in codegen.ts. Currently these options // This tracks the CompileConfig in codegen.ts. Currently these options
@ -209,7 +204,7 @@ function compileTemplate(context: DiagnosticContext, type: StaticSymbol, templat
const parser = new TemplateParser( const parser = new TemplateParser(
config, context.reflector, expressionParser, new DomElementSchemaRegistry(), htmlParser, config, context.reflector, expressionParser, new DomElementSchemaRegistry(), htmlParser,
null !, []); null !, []);
const htmlResult = htmlParser.parse(template, '', true); const htmlResult = htmlParser.parse(template, '', {tokenizeExpansionForms: true});
const analyzedModules = context.analyzedModules; const analyzedModules = context.analyzedModules;
// let errors: Diagnostic[]|undefined = undefined; // let errors: Diagnostic[]|undefined = undefined;
let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(type); let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(type);

View File

@ -113,12 +113,11 @@ export class DirectiveNormalizer {
templateAbsUrl: string): PreparsedTemplate { templateAbsUrl: string): PreparsedTemplate {
const isInline = !!prenormData.template; const isInline = !!prenormData.template;
const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation !); const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation !);
const rootNodesAndErrors = this._htmlParser.parse( const templateUrl = templateSourceUrl(
template,
templateSourceUrl(
{reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}}, {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}},
{isInline, templateUrl: templateAbsUrl}), {isInline, templateUrl: templateAbsUrl});
true, interpolationConfig); const rootNodesAndErrors = this._htmlParser.parse(
template, templateUrl, {tokenizeExpansionForms: true, interpolationConfig});
if (rootNodesAndErrors.errors.length > 0) { if (rootNodesAndErrors.errors.length > 0) {
const errorString = rootNodesAndErrors.errors.join('\n'); const errorString = rootNodesAndErrors.errors.join('\n');
throw syntaxError(`Template parse errors:\n${errorString}`); throw syntaxError(`Template parse errors:\n${errorString}`);

View File

@ -8,7 +8,8 @@
import {MissingTranslationStrategy} from '../core'; import {MissingTranslationStrategy} from '../core';
import {HtmlParser} from '../ml_parser/html_parser'; import {HtmlParser} from '../ml_parser/html_parser';
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from '../ml_parser/interpolation_config'; import {DEFAULT_INTERPOLATION_CONFIG} from '../ml_parser/interpolation_config';
import {TokenizeOptions} from '../ml_parser/lexer';
import {ParseTreeResult} from '../ml_parser/parser'; import {ParseTreeResult} from '../ml_parser/parser';
import {Console} from '../util'; import {Console} from '../util';
@ -41,11 +42,9 @@ export class I18NHtmlParser implements HtmlParser {
} }
} }
parse( parse(source: string, url: string, options: TokenizeOptions = {}): ParseTreeResult {
source: string, url: string, parseExpansionForms: boolean = false, const interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult { const parseResult = this._htmlParser.parse(source, url, {interpolationConfig, ...options});
const parseResult =
this._htmlParser.parse(source, url, parseExpansionForms, interpolationConfig);
if (parseResult.errors.length) { if (parseResult.errors.length) {
return new ParseTreeResult(parseResult.rootNodes, parseResult.errors); return new ParseTreeResult(parseResult.rootNodes, parseResult.errors);

View File

@ -27,7 +27,8 @@ export class MessageBundle {
updateFromTemplate(html: string, url: string, interpolationConfig: InterpolationConfig): updateFromTemplate(html: string, url: string, interpolationConfig: InterpolationConfig):
ParseError[] { ParseError[] {
const htmlParserResult = this._htmlParser.parse(html, url, true, interpolationConfig); const htmlParserResult =
this._htmlParser.parse(html, url, {tokenizeExpansionForms: true, interpolationConfig});
if (htmlParserResult.errors.length) { if (htmlParserResult.errors.length) {
return htmlParserResult.errors; return htmlParserResult.errors;

View File

@ -185,7 +185,7 @@ class XliffParser implements ml.Visitor {
this._unitMlString = null; this._unitMlString = null;
this._msgIdToHtml = {}; this._msgIdToHtml = {};
const xml = new XmlParser().parse(xliff, url, false); const xml = new XmlParser().parse(xliff, url);
this._errors = xml.errors; this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes, null); ml.visitAll(this, xml.rootNodes, null);
@ -268,7 +268,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[]; private _errors !: I18nError[];
convert(message: string, url: string) { convert(message: string, url: string) {
const xmlIcu = new XmlParser().parse(message, url, true); const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors; this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ? const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?

View File

@ -203,7 +203,7 @@ class Xliff2Parser implements ml.Visitor {
this._unitMlString = null; this._unitMlString = null;
this._msgIdToHtml = {}; this._msgIdToHtml = {};
const xml = new XmlParser().parse(xliff, url, false); const xml = new XmlParser().parse(xliff, url);
this._errors = xml.errors; this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes, null); ml.visitAll(this, xml.rootNodes, null);
@ -293,7 +293,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[]; private _errors !: I18nError[];
convert(message: string, url: string) { convert(message: string, url: string) {
const xmlIcu = new XmlParser().parse(message, url, true); const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors; this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ? const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?

View File

@ -88,7 +88,7 @@ class XtbParser implements ml.Visitor {
// We can not parse the ICU messages at this point as some messages might not originate // We can not parse the ICU messages at this point as some messages might not originate
// from Angular that could not be lex'd. // from Angular that could not be lex'd.
const xml = new XmlParser().parse(xtb, url, false); const xml = new XmlParser().parse(xtb, url);
this._errors = xml.errors; this._errors = xml.errors;
ml.visitAll(this, xml.rootNodes); ml.visitAll(this, xml.rootNodes);
@ -159,7 +159,7 @@ class XmlToI18n implements ml.Visitor {
private _errors !: I18nError[]; private _errors !: I18nError[];
convert(message: string, url: string) { convert(message: string, url: string) {
const xmlIcu = new XmlParser().parse(message, url, true); const xmlIcu = new XmlParser().parse(message, url, {tokenizeExpansionForms: true});
this._errors = xmlIcu.errors; this._errors = xmlIcu.errors;
const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ? const i18nNodes = this._errors.length > 0 || xmlIcu.rootNodes.length == 0 ?

View File

@ -83,7 +83,7 @@ class I18nToHtmlVisitor implements i18n.Visitor {
// text to html // text to html
const url = srcMsg.nodes[0].sourceSpan.start.file.url; const url = srcMsg.nodes[0].sourceSpan.start.file.url;
const html = new HtmlParser().parse(text, url, true); const html = new HtmlParser().parse(text, url, {tokenizeExpansionForms: true});
return { return {
nodes: html.rootNodes, nodes: html.rootNodes,

View File

@ -112,7 +112,7 @@ export class CompilerFacadeImpl implements CompilerFacade {
// Parse the template and check for errors. // Parse the template and check for errors.
const template = parseTemplate( const template = parseTemplate(
facade.template, sourceMapUrl, facade.template, sourceMapUrl,
{preserveWhitespaces: facade.preserveWhitespaces || false, interpolationConfig}); {preserveWhitespaces: facade.preserveWhitespaces, interpolationConfig});
if (template.errors !== undefined) { if (template.errors !== undefined) {
const errors = template.errors.map(err => err.toString()).join(', '); const errors = template.errors.map(err => err.toString()).join(', ');
throw new Error(`Errors during JIT compilation of template for ${facade.name}: ${errors}`); throw new Error(`Errors during JIT compilation of template for ${facade.name}: ${errors}`);

View File

@ -7,7 +7,7 @@
*/ */
import {getHtmlTagDefinition} from './html_tags'; import {getHtmlTagDefinition} from './html_tags';
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config'; import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser'; import {ParseTreeResult, Parser} from './parser';
export {ParseTreeResult, TreeError} from './parser'; export {ParseTreeResult, TreeError} from './parser';
@ -15,9 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class HtmlParser extends Parser { export class HtmlParser extends Parser {
constructor() { super(getHtmlTagDefinition); } constructor() { super(getHtmlTagDefinition); }
parse( parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
source: string, url: string, parseExpansionForms: boolean = false, return super.parse(source, url, options);
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
return super.parse(source, url, parseExpansionForms, interpolationConfig);
} }
} }

View File

@ -49,14 +49,20 @@ export class TokenizeResult {
constructor(public tokens: Token[], public errors: TokenError[]) {} constructor(public tokens: Token[], public errors: TokenError[]) {}
} }
/**
* Options that modify how the text is tokenized.
*/
export interface TokenizeOptions {
/** Whether to tokenize ICU messages (considered as text nodes when false). */
tokenizeExpansionForms?: boolean;
/** How to tokenize interpolation markers. */
interpolationConfig?: InterpolationConfig;
}
export function tokenize( export function tokenize(
source: string, url: string, getTagDefinition: (tagName: string) => TagDefinition, source: string, url: string, getTagDefinition: (tagName: string) => TagDefinition,
tokenizeExpansionForms: boolean = false, options: TokenizeOptions = {}): TokenizeResult {
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): TokenizeResult { return new _Tokenizer(new ParseSourceFile(source, url), getTagDefinition, options).tokenize();
return new _Tokenizer(
new ParseSourceFile(source, url), getTagDefinition, tokenizeExpansionForms,
interpolationConfig)
.tokenize();
} }
const _CR_OR_CRLF_REGEXP = /\r\n?/g; const _CR_OR_CRLF_REGEXP = /\r\n?/g;
@ -78,6 +84,8 @@ class _ControlFlowError {
class _Tokenizer { class _Tokenizer {
private _input: string; private _input: string;
private _length: number; private _length: number;
private _tokenizeIcu: boolean;
private _interpolationConfig: InterpolationConfig;
// Note: this is always lowercase! // Note: this is always lowercase!
private _peek: number = -1; private _peek: number = -1;
private _nextPeek: number = -1; private _nextPeek: number = -1;
@ -102,8 +110,9 @@ class _Tokenizer {
*/ */
constructor( constructor(
private _file: ParseSourceFile, private _getTagDefinition: (tagName: string) => TagDefinition, private _file: ParseSourceFile, private _getTagDefinition: (tagName: string) => TagDefinition,
private _tokenizeIcu: boolean, options: TokenizeOptions) {
private _interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) { this._tokenizeIcu = options.tokenizeExpansionForms || false;
this._interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
this._input = _file.content; this._input = _file.content;
this._length = _file.content.length; this._length = _file.content.length;
this._advance(); this._advance();

View File

@ -9,7 +9,6 @@
import {ParseError, ParseSourceSpan} from '../parse_util'; import {ParseError, ParseSourceSpan} from '../parse_util';
import * as html from './ast'; import * as html from './ast';
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
import * as lex from './lexer'; import * as lex from './lexer';
import {TagDefinition, getNsPrefix, isNgContainer, mergeNsAndName} from './tags'; import {TagDefinition, getNsPrefix, isNgContainer, mergeNsAndName} from './tags';
@ -30,11 +29,8 @@ export class ParseTreeResult {
export class Parser { export class Parser {
constructor(public getTagDefinition: (tagName: string) => TagDefinition) {} constructor(public getTagDefinition: (tagName: string) => TagDefinition) {}
parse( parse(source: string, url: string, options?: lex.TokenizeOptions): ParseTreeResult {
source: string, url: string, parseExpansionForms: boolean = false, const tokensAndErrors = lex.tokenize(source, url, this.getTagDefinition, options);
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
const tokensAndErrors =
lex.tokenize(source, url, this.getTagDefinition, parseExpansionForms, interpolationConfig);
const treeAndErrors = new _TreeBuilder(tokensAndErrors.tokens, this.getTagDefinition).build(); const treeAndErrors = new _TreeBuilder(tokensAndErrors.tokens, this.getTagDefinition).build();

View File

@ -6,6 +6,7 @@
* found in the LICENSE file at https://angular.io/license * found in the LICENSE file at https://angular.io/license
*/ */
import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser'; import {ParseTreeResult, Parser} from './parser';
import {getXmlTagDefinition} from './xml_tags'; import {getXmlTagDefinition} from './xml_tags';
@ -14,7 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class XmlParser extends Parser { export class XmlParser extends Parser {
constructor() { super(getXmlTagDefinition); } constructor() { super(getXmlTagDefinition); }
parse(source: string, url: string, parseExpansionForms: boolean = false): ParseTreeResult { parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
return super.parse(source, url, parseExpansionForms); return super.parse(source, url, options);
} }
} }

View File

@ -1562,20 +1562,35 @@ function interpolate(args: o.Expression[]): o.Expression {
return o.importExpr(R3.interpolationV).callFn([o.literalArr(args)]); return o.importExpr(R3.interpolationV).callFn([o.literalArr(args)]);
} }
/**
* Options that can be used to modify how a template is parsed by `parseTemplate()`.
*/
export interface ParseTemplateOptions {
/**
* Include whitespace nodes in the parsed output.
*/
preserveWhitespaces?: boolean;
/**
* How to parse interpolation markers.
*/
interpolationConfig?: InterpolationConfig;
}
/** /**
* Parse a template into render3 `Node`s and additional metadata, with no other dependencies. * Parse a template into render3 `Node`s and additional metadata, with no other dependencies.
* *
* @param template text of the template to parse * @param template text of the template to parse
* @param templateUrl URL to use for source mapping of the parsed template * @param templateUrl URL to use for source mapping of the parsed template
* @param options options to modify how the template is parsed
*/ */
export function parseTemplate( export function parseTemplate(
template: string, templateUrl: string, template: string, templateUrl: string,
options: {preserveWhitespaces?: boolean, interpolationConfig?: InterpolationConfig} = {}): options: ParseTemplateOptions = {}): {errors?: ParseError[], nodes: t.Node[]} {
{errors?: ParseError[], nodes: t.Node[]} {
const {interpolationConfig, preserveWhitespaces} = options; const {interpolationConfig, preserveWhitespaces} = options;
const bindingParser = makeBindingParser(interpolationConfig); const bindingParser = makeBindingParser(interpolationConfig);
const htmlParser = new HtmlParser(); const htmlParser = new HtmlParser();
const parseResult = htmlParser.parse(template, templateUrl, true, interpolationConfig); const parseResult =
htmlParser.parse(template, templateUrl, {...options, tokenizeExpansionForms: true});
if (parseResult.errors && parseResult.errors.length > 0) { if (parseResult.errors && parseResult.errors.length > 0) {
return {errors: parseResult.errors, nodes: []}; return {errors: parseResult.errors, nodes: []};

View File

@ -114,8 +114,10 @@ export class TemplateParser {
directives: CompileDirectiveSummary[], pipes: CompilePipeSummary[], schemas: SchemaMetadata[], directives: CompileDirectiveSummary[], pipes: CompilePipeSummary[], schemas: SchemaMetadata[],
templateUrl: string, preserveWhitespaces: boolean): TemplateParseResult { templateUrl: string, preserveWhitespaces: boolean): TemplateParseResult {
let htmlParseResult = typeof template === 'string' ? let htmlParseResult = typeof template === 'string' ?
this._htmlParser !.parse( this._htmlParser !.parse(template, templateUrl, {
template, templateUrl, true, this.getInterpolationConfig(component)) : tokenizeExpansionForms: true,
interpolationConfig: this.getInterpolationConfig(component)
}) :
template; template;
if (!preserveWhitespaces) { if (!preserveWhitespaces) {

View File

@ -501,7 +501,7 @@ import {serializeNodes as serializeHtmlNodes} from '../ml_parser/util/util';
function parseHtml(html: string): html.Node[] { function parseHtml(html: string): html.Node[] {
const htmlParser = new HtmlParser(); const htmlParser = new HtmlParser();
const parseResult = htmlParser.parse(html, 'extractor spec', true); const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 1) { if (parseResult.errors.length > 1) {
throw new Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`); throw new Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
} }

View File

@ -330,7 +330,7 @@ export function _extractMessages(
html: string, implicitTags: string[] = [], html: string, implicitTags: string[] = [],
implicitAttrs: {[k: string]: string[]} = {}): Message[] { implicitAttrs: {[k: string]: string[]} = {}): Message[] {
const htmlParser = new HtmlParser(); const htmlParser = new HtmlParser();
const parseResult = htmlParser.parse(html, 'extractor spec', true); const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 1) { if (parseResult.errors.length > 1) {
throw Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`); throw Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
} }

View File

@ -35,13 +35,13 @@ import {serializeNodes} from './util/util';
it('should support expansion', () => { it('should support expansion', () => {
const html = '{number, plural, =0 {none} =1 {one} other {many}}'; const html = '{number, plural, =0 {none} =1 {one} other {many}}';
const ast = parser.parse(html, 'url', true); const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]); expect(serializeNodes(ast.rootNodes)).toEqual([html]);
}); });
it('should support comment', () => { it('should support comment', () => {
const html = '<!--comment-->'; const html = '<!--comment-->';
const ast = parser.parse(html, 'url', true); const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]); expect(serializeNodes(ast.rootNodes)).toEqual([html]);
}); });
@ -53,7 +53,7 @@ import {serializeNodes} from './util/util';
{number, plural, =0 {{sex, select, other {<b>?</b>}}}} {number, plural, =0 {{sex, select, other {<b>?</b>}}}}
</p> </p>
</div>`; </div>`;
const ast = parser.parse(html, 'url', true); const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
expect(serializeNodes(ast.rootNodes)).toEqual([html]); expect(serializeNodes(ast.rootNodes)).toEqual([html]);
}); });
}); });

View File

@ -300,7 +300,7 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
it('should parse out expansion forms', () => { it('should parse out expansion forms', () => {
const parsed = parser.parse( const parsed = parser.parse(
`<div>before{messages.length, plural, =0 {You have <b>no</b> messages} =1 {One {{message}}}}after</div>`, `<div>before{messages.length, plural, =0 {You have <b>no</b> messages} =1 {One {{message}}}}after</div>`,
'TestComp', true); 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([ expect(humanizeDom(parsed)).toEqual([
[html.Element, 'div', 0], [html.Element, 'div', 0],
@ -324,8 +324,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
}); });
it('should parse out expansion forms', () => { it('should parse out expansion forms', () => {
const parsed = const parsed = parser.parse(
parser.parse(`<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp', true); `<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp',
{tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([ expect(humanizeDom(parsed)).toEqual([
[html.Element, 'div', 0], [html.Element, 'div', 0],
@ -337,7 +338,8 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
it('should parse out nested expansion forms', () => { it('should parse out nested expansion forms', () => {
const parsed = parser.parse( const parsed = parser.parse(
`{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp', true); `{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp',
{tokenizeExpansionForms: true});
expect(humanizeDom(parsed)).toEqual([ expect(humanizeDom(parsed)).toEqual([
[html.Expansion, 'messages.length', 'plural', 0], [html.Expansion, 'messages.length', 'plural', 0],
[html.ExpansionCase, '=0', 1], [html.ExpansionCase, '=0', 1],
@ -353,26 +355,31 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
}); });
it('should error when expansion form is not closed', () => { it('should error when expansion form is not closed', () => {
const p = parser.parse(`{messages.length, plural, =0 {one}`, 'TestComp', true); const p = parser.parse(
`{messages.length, plural, =0 {one}`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([ expect(humanizeErrors(p.errors)).toEqual([
[null, 'Invalid ICU message. Missing \'}\'.', '0:34'] [null, 'Invalid ICU message. Missing \'}\'.', '0:34']
]); ]);
}); });
it('should support ICU expressions with cases that contain numbers', () => { it('should support ICU expressions with cases that contain numbers', () => {
const p = parser.parse(`{sex, select, male {m} female {f} 0 {other}}`, 'TestComp', true); const p = parser.parse(
`{sex, select, male {m} female {f} 0 {other}}`, 'TestComp',
{tokenizeExpansionForms: true});
expect(p.errors.length).toEqual(0); expect(p.errors.length).toEqual(0);
}); });
it('should error when expansion case is not closed', () => { it('should error when expansion case is not closed', () => {
const p = parser.parse(`{messages.length, plural, =0 {one`, 'TestComp', true); const p = parser.parse(
`{messages.length, plural, =0 {one`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([ expect(humanizeErrors(p.errors)).toEqual([
[null, 'Invalid ICU message. Missing \'}\'.', '0:29'] [null, 'Invalid ICU message. Missing \'}\'.', '0:29']
]); ]);
}); });
it('should error when invalid html in the case', () => { it('should error when invalid html in the case', () => {
const p = parser.parse(`{messages.length, plural, =0 {<b/>}`, 'TestComp', true); const p = parser.parse(
`{messages.length, plural, =0 {<b/>}`, 'TestComp', {tokenizeExpansionForms: true});
expect(humanizeErrors(p.errors)).toEqual([ expect(humanizeErrors(p.errors)).toEqual([
['b', 'Only void and foreign elements can be self closed "b"', '0:30'] ['b', 'Only void and foreign elements can be self closed "b"', '0:30']
]); ]);
@ -404,8 +411,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spe
}); });
it('should support expansion form', () => { it('should support expansion form', () => {
expect(humanizeDomSourceSpans( expect(humanizeDomSourceSpans(parser.parse(
parser.parse('<div>{count, plural, =0 {msg}}</div>', 'TestComp', true))) '<div>{count, plural, =0 {msg}}</div>', 'TestComp',
{tokenizeExpansionForms: true})))
.toEqual([ .toEqual([
[html.Element, 'div', 0, '<div>'], [html.Element, 'div', 0, '<div>'],
[html.Expansion, 'count', 'plural', 1, '{count, plural, =0 {msg}}'], [html.Expansion, 'count', 'plural', 1, '{count, plural, =0 {msg}}'],

View File

@ -17,7 +17,7 @@ import {humanizeNodes} from './ast_spec_utils';
describe('Expander', () => { describe('Expander', () => {
function expand(template: string): ExpansionResult { function expand(template: string): ExpansionResult {
const htmlParser = new HtmlParser(); const htmlParser = new HtmlParser();
const res = htmlParser.parse(template, 'url', true); const res = htmlParser.parse(template, 'url', {tokenizeExpansionForms: true});
return expandNodes(res.rootNodes); return expandNodes(res.rootNodes);
} }

View File

@ -443,7 +443,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse interpolation with custom markers', () => { it('should parse interpolation with custom markers', () => {
expect(tokenizeAndHumanizeParts('{% a %}', null !, {start: '{%', end: '%}'})).toEqual([ expect(tokenizeAndHumanizeParts('{% a %}', {interpolationConfig: {start: '{%', end: '%}'}}))
.toEqual([
[lex.TokenType.TEXT, '{% a %}'], [lex.TokenType.TEXT, '{% a %}'],
[lex.TokenType.EOF], [lex.TokenType.EOF],
]); ]);
@ -524,7 +525,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should treat expansion form as text when they are not parsed', () => { it('should treat expansion form as text when they are not parsed', () => {
expect(tokenizeAndHumanizeParts('<span>{a, b, =4 {c}}</span>', false)).toEqual([ expect(tokenizeAndHumanizeParts(
'<span>{a, b, =4 {c}}</span>', {tokenizeExpansionForms: false}))
.toEqual([
[lex.TokenType.TAG_OPEN_START, null, 'span'], [lex.TokenType.TAG_OPEN_START, null, 'span'],
[lex.TokenType.TAG_OPEN_END], [lex.TokenType.TAG_OPEN_END],
[lex.TokenType.TEXT, '{a, b, =4 {c}}'], [lex.TokenType.TEXT, '{a, b, =4 {c}}'],
@ -641,7 +644,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
describe('expansion forms', () => { describe('expansion forms', () => {
it('should parse an expansion form', () => { it('should parse an expansion form', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true)) expect(
tokenizeAndHumanizeParts(
'{one.two, three, =4 {four} =5 {five} foo {bar} }', {tokenizeExpansionForms: true}))
.toEqual([ .toEqual([
[lex.TokenType.EXPANSION_FORM_START], [lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'], [lex.TokenType.RAW_TEXT, 'one.two'],
@ -664,7 +669,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse an expansion form with text elements surrounding it', () => { it('should parse an expansion form with text elements surrounding it', () => {
expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([ expect(tokenizeAndHumanizeParts(
'before{one.two, three, =4 {four}}after', {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.TEXT, 'before'], [lex.TokenType.TEXT, 'before'],
[lex.TokenType.EXPANSION_FORM_START], [lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'], [lex.TokenType.RAW_TEXT, 'one.two'],
@ -680,7 +687,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse an expansion form as a tag single child', () => { it('should parse an expansion form as a tag single child', () => {
expect(tokenizeAndHumanizeParts('<div><span>{a, b, =4 {c}}</span></div>', true)).toEqual([ expect(tokenizeAndHumanizeParts(
'<div><span>{a, b, =4 {c}}</span></div>', {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.TAG_OPEN_START, null, 'div'], [lex.TokenType.TAG_OPEN_START, null, 'div'],
[lex.TokenType.TAG_OPEN_END], [lex.TokenType.TAG_OPEN_END],
[lex.TokenType.TAG_OPEN_START, null, 'span'], [lex.TokenType.TAG_OPEN_START, null, 'span'],
@ -700,7 +709,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse an expansion forms with elements in it', () => { it('should parse an expansion forms with elements in it', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four <b>a</b>}}', true)).toEqual([ expect(tokenizeAndHumanizeParts(
'{one.two, three, =4 {four <b>a</b>}}', {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.EXPANSION_FORM_START], [lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'], [lex.TokenType.RAW_TEXT, 'one.two'],
[lex.TokenType.RAW_TEXT, 'three'], [lex.TokenType.RAW_TEXT, 'three'],
@ -718,7 +729,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse an expansion forms containing an interpolation', () => { it('should parse an expansion forms containing an interpolation', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([ expect(tokenizeAndHumanizeParts(
'{one.two, three, =4 {four {{a}}}}', {tokenizeExpansionForms: true}))
.toEqual([
[lex.TokenType.EXPANSION_FORM_START], [lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'], [lex.TokenType.RAW_TEXT, 'one.two'],
[lex.TokenType.RAW_TEXT, 'three'], [lex.TokenType.RAW_TEXT, 'three'],
@ -732,7 +745,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
it('should parse nested expansion forms', () => { it('should parse nested expansion forms', () => {
expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true)) expect(tokenizeAndHumanizeParts(
`{one.two, three, =4 { {xx, yy, =x {one}} }}`, {tokenizeExpansionForms: true}))
.toEqual([ .toEqual([
[lex.TokenType.EXPANSION_FORM_START], [lex.TokenType.EXPANSION_FORM_START],
[lex.TokenType.RAW_TEXT, 'one.two'], [lex.TokenType.RAW_TEXT, 'one.two'],
@ -757,7 +771,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
describe('errors', () => { describe('errors', () => {
it('should report unescaped "{" on error', () => { it('should report unescaped "{" on error', () => {
expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, true)).toEqual([[ expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, {tokenizeExpansionForms: true}))
.toEqual([[
lex.TokenType.RAW_TEXT, lex.TokenType.RAW_TEXT,
`Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`, `Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`,
'0:21', '0:21',
@ -790,11 +805,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
}); });
} }
function tokenizeWithoutErrors( function tokenizeWithoutErrors(input: string, options?: lex.TokenizeOptions): lex.Token[] {
input: string, tokenizeExpansionForms: boolean = false, const tokenizeResult = lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options);
interpolationConfig?: InterpolationConfig): lex.Token[] {
const tokenizeResult = lex.tokenize(
input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms, interpolationConfig);
if (tokenizeResult.errors.length > 0) { if (tokenizeResult.errors.length > 0) {
const errorString = tokenizeResult.errors.join('\n'); const errorString = tokenizeResult.errors.join('\n');
@ -804,27 +816,25 @@ function tokenizeWithoutErrors(
return tokenizeResult.tokens; return tokenizeResult.tokens;
} }
function tokenizeAndHumanizeParts( function tokenizeAndHumanizeParts(input: string, options?: lex.TokenizeOptions): any[] {
input: string, tokenizeExpansionForms: boolean = false, return tokenizeWithoutErrors(input, options).map(token => [<any>token.type].concat(token.parts));
interpolationConfig?: InterpolationConfig): any[] {
return tokenizeWithoutErrors(input, tokenizeExpansionForms, interpolationConfig)
.map(token => [<any>token.type].concat(token.parts));
} }
function tokenizeAndHumanizeSourceSpans(input: string): any[] { function tokenizeAndHumanizeSourceSpans(input: string, options?: lex.TokenizeOptions): any[] {
return tokenizeWithoutErrors(input).map(token => [<any>token.type, token.sourceSpan.toString()]); return tokenizeWithoutErrors(input, options)
.map(token => [<any>token.type, token.sourceSpan.toString()]);
} }
function humanizeLineColumn(location: ParseLocation): string { function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`; return `${location.line}:${location.col}`;
} }
function tokenizeAndHumanizeLineColumn(input: string): any[] { function tokenizeAndHumanizeLineColumn(input: string, options?: lex.TokenizeOptions): any[] {
return tokenizeWithoutErrors(input).map( return tokenizeWithoutErrors(input, options)
token => [<any>token.type, humanizeLineColumn(token.sourceSpan.start)]); .map(token => [<any>token.type, humanizeLineColumn(token.sourceSpan.start)]);
} }
function tokenizeAndHumanizeErrors(input: string, tokenizeExpansionForms: boolean = false): any[] { function tokenizeAndHumanizeErrors(input: string, options?: lex.TokenizeOptions): any[] {
return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms) return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options)
.errors.map(e => [<any>e.tokenType, e.msg, humanizeLineColumn(e.span.start)]); .errors.map(e => [<any>e.tokenType, e.msg, humanizeLineColumn(e.span.start)]);
} }

View File

@ -81,7 +81,8 @@ export function parseR3(
input: string, options: {preserveWhitespaces?: boolean} = {}): Render3ParseResult { input: string, options: {preserveWhitespaces?: boolean} = {}): Render3ParseResult {
const htmlParser = new HtmlParser(); const htmlParser = new HtmlParser();
const parseResult = htmlParser.parse(input, 'path:://to/template', true); const parseResult =
htmlParser.parse(input, 'path:://to/template', {tokenizeExpansionForms: true});
if (parseResult.errors.length > 0) { if (parseResult.errors.length > 0) {
const msg = parseResult.errors.map(e => e.toString()).join('\n'); const msg = parseResult.errors.map(e => e.toString()).join('\n');

View File

@ -112,7 +112,7 @@ class LanguageServiceImpl implements LanguageService {
const parser = new TemplateParser( const parser = new TemplateParser(
config, this.host.resolver.getReflector(), expressionParser, config, this.host.resolver.getReflector(), expressionParser,
new DomElementSchemaRegistry(), htmlParser, null !, []); new DomElementSchemaRegistry(), htmlParser, null !, []);
const htmlResult = htmlParser.parse(template.source, '', true); const htmlResult = htmlParser.parse(template.source, '', {tokenizeExpansionForms: true});
const analyzedModules = this.host.getAnalyzedModules(); const analyzedModules = this.host.getAnalyzedModules();
let errors: Diagnostic[]|undefined = undefined; let errors: Diagnostic[]|undefined = undefined;
let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(template.type); let ngModule = analyzedModules.ngModuleByPipeOrDirective.get(template.type);

View File

@ -38,11 +38,7 @@ export function createLanguageServiceFromTypescript(
* syntactically incorrect templates. * syntactically incorrect templates.
*/ */
export class DummyHtmlParser extends HtmlParser { export class DummyHtmlParser extends HtmlParser {
parse( parse(): ParseTreeResult { return new ParseTreeResult([], []); }
source: string, url: string, parseExpansionForms: boolean = false,
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
return new ParseTreeResult([], []);
}
} }
/** /**