refactor(HtmlLexer): cleanup
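Decouples the HTML lexer and parser from the expression parser: tokenizeHtml() and HtmlParser no longer take an ExpressionParser, isExpansionFormStart() now receives the whole InterpolationConfig instead of just its start marker, the StringWrapper/NumberWrapper facade helpers are replaced with native String.fromCharCode/charCodeAt/parseInt calls, and var declarations become const/let.

A minimal sketch of the resulting call-site change (the template string below is illustrative, not from this commit; 'someUrl' follows the specs):

    // before: callers had to thread an expression parser through
    // tokenizeHtml('<b>{{ a }}</b>', 'someUrl',
    //              new ExpressionParser(new ExpressionLexer()), false);

    // after: the HTML lexer stands alone
    const result = tokenizeHtml('<b>{{ a }}</b>', 'someUrl', /* tokenizeExpansionForms */ false);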
parent e676fded21
commit 60e6f91a53
@@ -126,8 +126,7 @@ export class CodeGenerator {
     const reflectorHost = new ReflectorHost(program, compilerHost, options, reflectorHostContext);
     const staticReflector = new StaticReflector(reflectorHost);
     StaticAndDynamicReflectionCapabilities.install(staticReflector);
-    const expressionParser = new Parser(new Lexer());
-    const htmlParser = new HtmlParser(expressionParser);
+    const htmlParser = new HtmlParser();
     const config = new compiler.CompilerConfig({
       genDebugInfo: options.debug === true,
       defaultEncapsulation: ViewEncapsulation.Emulated,
@@ -135,6 +134,7 @@ export class CodeGenerator {
       useJit: false
     });
     const normalizer = new DirectiveNormalizer(xhr, urlResolver, htmlParser, config);
+    const expressionParser = new Parser(new Lexer());
     const tmplParser = new TemplateParser(
         expressionParser, new DomElementSchemaRegistry(), htmlParser,
         /*console*/ null, []);

@@ -137,8 +137,7 @@ class Extractor {
     const reflectorHost = new ReflectorHost(program, compilerHost, options);
     const staticReflector = new StaticReflector(reflectorHost);
     StaticAndDynamicReflectionCapabilities.install(staticReflector);
-    const expressionParser = new Parser(new Lexer());
-    const htmlParser = new HtmlParser(expressionParser);
+    const htmlParser = new HtmlParser();
     const config = new compiler.CompilerConfig({
       genDebugInfo: true,
       defaultEncapsulation: ViewEncapsulation.Emulated,
@@ -146,6 +145,7 @@ class Extractor {
       useJit: false
     });
     const normalizer = new DirectiveNormalizer(xhr, urlResolver, htmlParser, config);
+    const expressionParser = new Parser(new Lexer());
     const resolver = new CompileMetadataResolver(
         new compiler.DirectiveResolver(staticReflector), new compiler.PipeResolver(staticReflector),
         new compiler.ViewResolver(staticReflector), config, staticReflector);

@@ -28,7 +28,8 @@ const INTERPOLATION_BLACKLIST_REGEXPS = [
   /^\s*$/,        // empty
   /[<>]/,         // html tag
   /^[{}]$/,       // i18n expansion
-  /&(#|[a-z])/i,  // character reference
+  /&(#|[a-z])/i,  // character reference,
+  /^\/\//,        // comment
 ];

 export function assertInterpolationSymbols(identifier: string, value: any): void {

@@ -14,6 +14,7 @@ import {CompileDirectiveMetadata, CompileStylesheetMetadata, CompileTemplateMeta
 import {CompilerConfig} from './config';
 import {HtmlAstVisitor, HtmlAttrAst, HtmlCommentAst, HtmlElementAst, HtmlExpansionAst, HtmlExpansionCaseAst, HtmlTextAst, htmlVisitAll} from './html_ast';
 import {HtmlParser} from './html_parser';
+import {InterpolationConfig} from './interpolation_config';
 import {extractStyleUrls, isStyleUrlResolvable} from './style_url_resolver';
 import {PreparsedElementType, preparseElement} from './template_preparser';
 import {UrlResolver} from './url_resolver';

@@ -7,8 +7,7 @@
  */

 import * as chars from './chars';
-import {Parser as ExpressionParser} from './expression_parser/parser';
-import {NumberWrapper, StringWrapper, isBlank, isPresent} from './facade/lang';
+import {isBlank, isPresent} from './facade/lang';
 import {HtmlTagContentType, NAMED_ENTITIES, getHtmlTagDefinition} from './html_tags';
 import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
 import {ParseError, ParseLocation, ParseSourceFile, ParseSourceSpan} from './parse_util';
@@ -33,7 +32,6 @@ export enum HtmlTokenType {
   EXPANSION_CASE_EXP_START,
   EXPANSION_CASE_EXP_END,
   EXPANSION_FORM_END,
-  INTERPOLATION,
   EOF
 }

@@ -53,11 +51,10 @@ export class HtmlTokenizeResult {
 }

 export function tokenizeHtml(
-    sourceContent: string, sourceUrl: string, parser: ExpressionParser,
-    tokenizeExpansionForms: boolean = false,
+    sourceContent: string, sourceUrl: string, tokenizeExpansionForms: boolean = false,
     interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): HtmlTokenizeResult {
   return new _HtmlTokenizer(
-             new ParseSourceFile(sourceContent, sourceUrl), tokenizeExpansionForms, parser,
+             new ParseSourceFile(sourceContent, sourceUrl), tokenizeExpansionForms,
              interpolationConfig)
       .tokenize();
 }
@@ -65,7 +62,7 @@ export function tokenizeHtml(
 const _CR_OR_CRLF_REGEXP = /\r\n?/g;

 function _unexpectedCharacterErrorMsg(charCode: number): string {
-  var char = charCode === chars.$EOF ? 'EOF' : StringWrapper.fromCharCode(charCode);
+  const char = charCode === chars.$EOF ? 'EOF' : String.fromCharCode(charCode);
   return `Unexpected character "${char}"`;
 }

@@ -98,12 +95,10 @@ class _HtmlTokenizer {
   /**
    * @param _file The html source
    * @param _tokenizeIcu Whether to tokenize ICU messages (considered as text nodes when false)
-   * @param _expressionParser Used to check syntax of interpolations
    * @param _interpolationConfig
    */
   constructor(
       private _file: ParseSourceFile, private _tokenizeIcu: boolean,
-      private _expressionParser: ExpressionParser,
       private _interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) {
     this._input = _file.content;
     this._length = _file.content.length;
@@ -115,12 +110,12 @@ class _HtmlTokenizer {
     // In order to keep the original position in the source, we can not
     // pre-process it.
     // Instead CRs are processed right before instantiating the tokens.
-    return StringWrapper.replaceAll(content, _CR_OR_CRLF_REGEXP, '\n');
+    return content.replace(_CR_OR_CRLF_REGEXP, '\n');
   }

   tokenize(): HtmlTokenizeResult {
     while (this._peek !== chars.$EOF) {
-      var start = this._getLocation();
+      const start = this._getLocation();
       try {
         if (this._attemptCharCode(chars.$LT)) {
           if (this._attemptCharCode(chars.$BANG)) {
@@ -157,7 +152,7 @@ class _HtmlTokenizer {
    * @internal
    */
   private _tokenizeExpansionForm(): boolean {
-    if (isExpansionFormStart(this._input, this._index, this._interpolationConfig.start)) {
+    if (isExpansionFormStart(this._input, this._index, this._interpolationConfig)) {
       this._consumeExpansionFormStart();
       return true;
     }
@@ -186,29 +181,19 @@ class _HtmlTokenizer {
     return new ParseLocation(this._file, this._index, this._line, this._column);
   }

-  private _getSpan(start?: ParseLocation, end?: ParseLocation): ParseSourceSpan {
-    if (isBlank(start)) {
-      start = this._getLocation();
-    }
-    if (isBlank(end)) {
-      end = this._getLocation();
-    }
+  private _getSpan(
+      start: ParseLocation = this._getLocation(),
+      end: ParseLocation = this._getLocation()): ParseSourceSpan {
     return new ParseSourceSpan(start, end);
   }

-  private _beginToken(type: HtmlTokenType, start: ParseLocation = null) {
-    if (isBlank(start)) {
-      start = this._getLocation();
-    }
+  private _beginToken(type: HtmlTokenType, start: ParseLocation = this._getLocation()) {
     this._currentTokenStart = start;
     this._currentTokenType = type;
   }

-  private _endToken(parts: string[], end: ParseLocation = null): HtmlToken {
-    if (isBlank(end)) {
-      end = this._getLocation();
-    }
-    var token = new HtmlToken(
+  private _endToken(parts: string[], end: ParseLocation = this._getLocation()): HtmlToken {
+    const token = new HtmlToken(
         this._currentTokenType, parts, new ParseSourceSpan(this._currentTokenStart, end));
     this.tokens.push(token);
     this._currentTokenStart = null;
@@ -217,7 +202,7 @@ class _HtmlTokenizer {
   }

   private _createError(msg: string, span: ParseSourceSpan): _ControlFlowError {
-    var error = new HtmlTokenError(msg, this._currentTokenType, span);
+    const error = new HtmlTokenError(msg, this._currentTokenType, span);
     this._currentTokenStart = null;
     this._currentTokenType = null;
     return new _ControlFlowError(error);
@@ -234,11 +219,9 @@ class _HtmlTokenizer {
       this._column++;
     }
     this._index++;
-    this._peek = this._index >= this._length ? chars.$EOF :
-                                               StringWrapper.charCodeAt(this._input, this._index);
-    this._nextPeek = this._index + 1 >= this._length ?
-        chars.$EOF :
-        StringWrapper.charCodeAt(this._input, this._index + 1);
+    this._peek = this._index >= this._length ? chars.$EOF : this._input.charCodeAt(this._index);
+    this._nextPeek =
+        this._index + 1 >= this._length ? chars.$EOF : this._input.charCodeAt(this._index + 1);
   }

   private _attemptCharCode(charCode: number): boolean {
@@ -258,7 +241,7 @@ class _HtmlTokenizer {
   }

   private _requireCharCode(charCode: number) {
-    var location = this._getLocation();
+    const location = this._getLocation();
     if (!this._attemptCharCode(charCode)) {
       throw this._createError(
           _unexpectedCharacterErrorMsg(this._peek), this._getSpan(location, location));
@@ -271,8 +254,8 @@ class _HtmlTokenizer {
       return false;
     }
     const initialPosition = this._savePosition();
-    for (var i = 0; i < len; i++) {
-      if (!this._attemptCharCode(StringWrapper.charCodeAt(chars, i))) {
+    for (let i = 0; i < len; i++) {
+      if (!this._attemptCharCode(chars.charCodeAt(i))) {
        // If attempting to parse the string fails, we want to reset the parser
        // to where it was before the attempt
        this._restorePosition(initialPosition);
@@ -283,8 +266,8 @@ class _HtmlTokenizer {
   }

   private _attemptStrCaseInsensitive(chars: string): boolean {
-    for (var i = 0; i < chars.length; i++) {
-      if (!this._attemptCharCodeCaseInsensitive(StringWrapper.charCodeAt(chars, i))) {
+    for (let i = 0; i < chars.length; i++) {
+      if (!this._attemptCharCodeCaseInsensitive(chars.charCodeAt(i))) {
         return false;
       }
     }
@@ -292,7 +275,7 @@ class _HtmlTokenizer {
   }

   private _requireStr(chars: string) {
-    var location = this._getLocation();
+    const location = this._getLocation();
     if (!this._attemptStr(chars)) {
       throw this._createError(_unexpectedCharacterErrorMsg(this._peek), this._getSpan(location));
     }
@@ -305,7 +288,7 @@ class _HtmlTokenizer {
   }

   private _requireCharCodeUntilFn(predicate: (code: number) => boolean, len: number) {
-    var start = this._getLocation();
+    const start = this._getLocation();
     this._attemptCharCodeUntilFn(predicate);
     if (this._index - start.offset < len) {
       throw this._createError(
@@ -323,14 +306,14 @@ class _HtmlTokenizer {
     if (decodeEntities && this._peek === chars.$AMPERSAND) {
       return this._decodeEntity();
     } else {
-      var index = this._index;
+      const index = this._index;
       this._advance();
       return this._input[index];
     }
   }

   private _decodeEntity(): string {
-    var start = this._getLocation();
+    const start = this._getLocation();
     this._advance();
     if (this._attemptCharCode(chars.$HASH)) {
       let isHex = this._attemptCharCode(chars.$x) || this._attemptCharCode(chars.$X);
@@ -342,8 +325,8 @@ class _HtmlTokenizer {
       this._advance();
       let strNum = this._input.substring(numberStart, this._index - 1);
       try {
-        let charCode = NumberWrapper.parseInt(strNum, isHex ? 16 : 10);
-        return StringWrapper.fromCharCode(charCode);
+        let charCode = parseInt(strNum, isHex ? 16 : 10);
+        return String.fromCharCode(charCode);
       } catch (e) {
         let entity = this._input.substring(start.offset + 1, this._index - 1);
         throw this._createError(_unknownEntityErrorMsg(entity), this._getSpan(start));
@@ -367,11 +350,11 @@ class _HtmlTokenizer {

   private _consumeRawText(
       decodeEntities: boolean, firstCharOfEnd: number, attemptEndRest: () => boolean): HtmlToken {
-    var tagCloseStart: ParseLocation;
-    var textStart = this._getLocation();
+    let tagCloseStart: ParseLocation;
+    const textStart = this._getLocation();
     this._beginToken(
         decodeEntities ? HtmlTokenType.ESCAPABLE_RAW_TEXT : HtmlTokenType.RAW_TEXT, textStart);
-    var parts: string[] = [];
+    const parts: string[] = [];
     while (true) {
       tagCloseStart = this._getLocation();
       if (this._attemptCharCode(firstCharOfEnd) && attemptEndRest()) {
@@ -392,7 +375,7 @@ class _HtmlTokenizer {
     this._beginToken(HtmlTokenType.COMMENT_START, start);
     this._requireCharCode(chars.$MINUS);
     this._endToken([]);
-    var textToken = this._consumeRawText(false, chars.$MINUS, () => this._attemptStr('->'));
+    const textToken = this._consumeRawText(false, chars.$MINUS, () => this._attemptStr('->'));
     this._beginToken(HtmlTokenType.COMMENT_END, textToken.sourceSpan.end);
     this._endToken([]);
   }
@@ -401,7 +384,7 @@ class _HtmlTokenizer {
     this._beginToken(HtmlTokenType.CDATA_START, start);
     this._requireStr('CDATA[');
     this._endToken([]);
-    var textToken = this._consumeRawText(false, chars.$RBRACKET, () => this._attemptStr(']>'));
+    const textToken = this._consumeRawText(false, chars.$RBRACKET, () => this._attemptStr(']>'));
     this._beginToken(HtmlTokenType.CDATA_END, textToken.sourceSpan.end);
     this._endToken([]);
   }
@@ -414,12 +397,12 @@ class _HtmlTokenizer {
   }

   private _consumePrefixAndName(): string[] {
-    var nameOrPrefixStart = this._index;
-    var prefix: string = null;
+    const nameOrPrefixStart = this._index;
+    let prefix: string = null;
     while (this._peek !== chars.$COLON && !isPrefixEnd(this._peek)) {
       this._advance();
     }
-    var nameStart: number;
+    let nameStart: number;
     if (this._peek === chars.$COLON) {
       this._advance();
       prefix = this._input.substring(nameOrPrefixStart, this._index - 1);
@@ -428,7 +411,7 @@ class _HtmlTokenizer {
       nameStart = nameOrPrefixStart;
     }
     this._requireCharCodeUntilFn(isNameEnd, this._index === nameStart ? 1 : 0);
-    var name = this._input.substring(nameStart, this._index);
+    const name = this._input.substring(nameStart, this._index);
     return [prefix, name];
   }

@@ -439,7 +422,7 @@ class _HtmlTokenizer {
     if (!chars.isAsciiLetter(this._peek)) {
       throw this._createError(_unexpectedCharacterErrorMsg(this._peek), this._getSpan());
     }
-    var nameStart = this._index;
+    const nameStart = this._index;
     this._consumeTagOpenStart(start);
     lowercaseTagName = this._input.substring(nameStart, this._index).toLowerCase();
     this._attemptCharCodeUntilFn(isNotWhitespace);
@@ -466,7 +449,7 @@ class _HtmlTokenizer {
       throw e;
     }

-    var contentTokenType = getHtmlTagDefinition(lowercaseTagName).contentType;
+    const contentTokenType = getHtmlTagDefinition(lowercaseTagName).contentType;
     if (contentTokenType === HtmlTagContentType.RAW_TEXT) {
       this._consumeRawTextWithTagClose(lowercaseTagName, false);
     } else if (contentTokenType === HtmlTagContentType.ESCAPABLE_RAW_TEXT) {
@@ -475,13 +458,12 @@ class _HtmlTokenizer {
   }

   private _consumeRawTextWithTagClose(lowercaseTagName: string, decodeEntities: boolean) {
-    var textToken = this._consumeRawText(decodeEntities, chars.$LT, () => {
+    const textToken = this._consumeRawText(decodeEntities, chars.$LT, () => {
       if (!this._attemptCharCode(chars.$SLASH)) return false;
       this._attemptCharCodeUntilFn(isNotWhitespace);
       if (!this._attemptStrCaseInsensitive(lowercaseTagName)) return false;
       this._attemptCharCodeUntilFn(isNotWhitespace);
-      if (!this._attemptCharCode(chars.$GT)) return false;
-      return true;
+      return this._attemptCharCode(chars.$GT);
     });
     this._beginToken(HtmlTokenType.TAG_CLOSE, textToken.sourceSpan.end);
     this._endToken([null, lowercaseTagName]);
@@ -489,13 +471,13 @@ class _HtmlTokenizer {

   private _consumeTagOpenStart(start: ParseLocation) {
     this._beginToken(HtmlTokenType.TAG_OPEN_START, start);
-    var parts = this._consumePrefixAndName();
+    const parts = this._consumePrefixAndName();
     this._endToken(parts);
   }

   private _consumeAttributeName() {
     this._beginToken(HtmlTokenType.ATTR_NAME);
-    var prefixAndName = this._consumePrefixAndName();
+    const prefixAndName = this._consumePrefixAndName();
     this._endToken(prefixAndName);
   }

@@ -520,8 +502,8 @@ class _HtmlTokenizer {
   }

   private _consumeTagOpenEnd() {
-    var tokenType = this._attemptCharCode(chars.$SLASH) ? HtmlTokenType.TAG_OPEN_END_VOID :
-                                                          HtmlTokenType.TAG_OPEN_END;
+    const tokenType = this._attemptCharCode(chars.$SLASH) ? HtmlTokenType.TAG_OPEN_END_VOID :
+                                                            HtmlTokenType.TAG_OPEN_END;
     this._beginToken(tokenType);
     this._requireCharCode(chars.$GT);
     this._endToken([]);
@@ -542,7 +524,7 @@ class _HtmlTokenizer {
     this._endToken([]);

     this._beginToken(HtmlTokenType.RAW_TEXT, this._getLocation());
-    let condition = this._readUntil(chars.$COMMA);
+    const condition = this._readUntil(chars.$COMMA);
     this._endToken([condition], this._getLocation());
     this._requireCharCode(chars.$COMMA);
     this._attemptCharCodeUntilFn(isNotWhitespace);
@@ -558,7 +540,7 @@ class _HtmlTokenizer {

   private _consumeExpansionCaseStart() {
     this._beginToken(HtmlTokenType.EXPANSION_CASE_VALUE, this._getLocation());
-    let value = this._readUntil(chars.$LBRACE).trim();
+    const value = this._readUntil(chars.$LBRACE).trim();
     this._endToken([value], this._getLocation());
     this._attemptCharCodeUntilFn(isNotWhitespace);

@@ -588,10 +570,9 @@ class _HtmlTokenizer {
   }

   private _consumeText() {
-    var start = this._getLocation();
+    const start = this._getLocation();
     this._beginToken(HtmlTokenType.TEXT, start);

-    var parts: string[] = [];
+    const parts: string[] = [];

     do {
       if (this._attemptStr(this._interpolationConfig.start)) {
@@ -613,13 +594,13 @@ class _HtmlTokenizer {
       return true;
     }

-    if (this._tokenizeIcu) {
-      if (isExpansionFormStart(this._input, this._index, this._interpolationConfig.start)) {
+    if (this._tokenizeIcu && !this._inInterpolation) {
+      if (isExpansionFormStart(this._input, this._index, this._interpolationConfig)) {
         // start of an expansion form
         return true;
       }

-      if (this._peek === chars.$RBRACE && !this._inInterpolation && this._isInExpansionCase()) {
+      if (this._peek === chars.$RBRACE && this._isInExpansionCase()) {
         // end of and expansion case
         return true;
       }
@@ -685,9 +666,10 @@ function isNamedEntityEnd(code: number): boolean {
   return code == chars.$SEMICOLON || code == chars.$EOF || !chars.isAsciiLetter(code);
 }

-function isExpansionFormStart(input: string, offset: number, interpolationStart: string): boolean {
+function isExpansionFormStart(
+    input: string, offset: number, interpolationConfig: InterpolationConfig): boolean {
   return input.charCodeAt(offset) == chars.$LBRACE &&
-      input.indexOf(interpolationStart, offset) != offset;
+      input.indexOf(interpolationConfig.start, offset) != offset;
 }

 function isExpansionCaseStart(peek: number): boolean {

@@ -30,14 +30,12 @@ export class HtmlParseTreeResult {

 @Injectable()
 export class HtmlParser {
-  constructor(public _expressionParser: ExpressionParser) {}
-
   parse(
       sourceContent: string, sourceUrl: string, parseExpansionForms: boolean = false,
       interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG):
       HtmlParseTreeResult {
-    var tokensAndErrors = tokenizeHtml(
-        sourceContent, sourceUrl, this._expressionParser, parseExpansionForms, interpolationConfig);
+    var tokensAndErrors =
+        tokenizeHtml(sourceContent, sourceUrl, parseExpansionForms, interpolationConfig);
     var treeAndErrors = new TreeBuilder(tokensAndErrors.tokens).build();
     return new HtmlParseTreeResult(
         treeAndErrors.rootNodes,

@@ -6,9 +6,6 @@
  * found in the LICENSE file at https://angular.io/license
  */

-import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer';
-import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser';
-
 import {RegExpWrapper, isBlank, isPresent} from '../facade/lang';
 import {HtmlAst, HtmlElementAst} from '../html_ast';
 import {HtmlParser} from '../html_parser';
@@ -37,9 +34,7 @@ export class XmbDeserializationError extends ParseError {
 }

 export function deserializeXmb(content: string, url: string): XmbDeserializationResult {
-  const expLexer = new ExpressionLexer();
-  const expParser = new ExpressionParser(expLexer);
-  const parser = new HtmlParser(expParser);
+  const parser = new HtmlParser();
   const normalizedContent = _expandPlaceholder(content.trim());
   const parsed = parser.parse(normalizedContent, url);

@@ -6,7 +6,7 @@
  * found in the LICENSE file at https://angular.io/license
  */

-import {AnimationAnimateMetadata, AnimationEntryMetadata, AnimationGroupMetadata, AnimationKeyframesSequenceMetadata, AnimationMetadata, AnimationStateDeclarationMetadata, AnimationStateMetadata, AnimationStateTransitionMetadata, AnimationStyleMetadata, AnimationWithStepsMetadata, AppModuleMetadata, AttributeMetadata, ComponentMetadata, HostMetadata, Inject, InjectMetadata, Injectable, Optional, OptionalMetadata, Provider, QueryMetadata, SelfMetadata, SkipSelfMetadata, ViewMetadata, ViewQueryMetadata, resolveForwardRef} from '@angular/core';
+import {AnimationAnimateMetadata, AnimationEntryMetadata, AnimationGroupMetadata, AnimationKeyframesSequenceMetadata, AnimationMetadata, AnimationStateDeclarationMetadata, AnimationStateMetadata, AnimationStateTransitionMetadata, AnimationStyleMetadata, AnimationWithStepsMetadata, AppModuleMetadata, AttributeMetadata, ChangeDetectionStrategy, ComponentMetadata, HostMetadata, Inject, InjectMetadata, Injectable, Optional, OptionalMetadata, Provider, QueryMetadata, SelfMetadata, SkipSelfMetadata, ViewMetadata, ViewQueryMetadata, resolveForwardRef} from '@angular/core';

 import {LIFECYCLE_HOOKS_VALUES, ReflectorReader, createProvider, isProviderLiteral, reflector} from '../core_private';
 import {StringMapWrapper} from '../src/facade/collection';

@@ -6,8 +6,6 @@
  * found in the LICENSE file at https://angular.io/license
  */

-import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer';
-import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser';
 import {HtmlToken, HtmlTokenError, HtmlTokenType, tokenizeHtml} from '@angular/compiler/src/html_lexer';
 import {InterpolationConfig} from '@angular/compiler/src/interpolation_config';
 import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '@angular/compiler/src/parse_util';
@@ -18,33 +16,41 @@ export function main() {
   describe('line/column numbers', () => {
     it('should work without newlines', () => {
       expect(tokenizeAndHumanizeLineColumn('<t>a</t>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '0:2'],
-        [HtmlTokenType.TEXT, '0:3'], [HtmlTokenType.TAG_CLOSE, '0:4'],
-        [HtmlTokenType.EOF, '0:8']
+        [HtmlTokenType.TAG_OPEN_START, '0:0'],
+        [HtmlTokenType.TAG_OPEN_END, '0:2'],
+        [HtmlTokenType.TEXT, '0:3'],
+        [HtmlTokenType.TAG_CLOSE, '0:4'],
+        [HtmlTokenType.EOF, '0:8'],
       ]);
     });

     it('should work with one newline', () => {
       expect(tokenizeAndHumanizeLineColumn('<t>\na</t>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '0:2'],
-        [HtmlTokenType.TEXT, '0:3'], [HtmlTokenType.TAG_CLOSE, '1:1'],
-        [HtmlTokenType.EOF, '1:5']
+        [HtmlTokenType.TAG_OPEN_START, '0:0'],
+        [HtmlTokenType.TAG_OPEN_END, '0:2'],
+        [HtmlTokenType.TEXT, '0:3'],
+        [HtmlTokenType.TAG_CLOSE, '1:1'],
+        [HtmlTokenType.EOF, '1:5'],
       ]);
     });

     it('should work with multiple newlines', () => {
       expect(tokenizeAndHumanizeLineColumn('<t\n>\na</t>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '1:0'],
-        [HtmlTokenType.TEXT, '1:1'], [HtmlTokenType.TAG_CLOSE, '2:1'],
-        [HtmlTokenType.EOF, '2:5']
+        [HtmlTokenType.TAG_OPEN_START, '0:0'],
+        [HtmlTokenType.TAG_OPEN_END, '1:0'],
+        [HtmlTokenType.TEXT, '1:1'],
+        [HtmlTokenType.TAG_CLOSE, '2:1'],
+        [HtmlTokenType.EOF, '2:5'],
       ]);
     });

     it('should work with CR and LF', () => {
       expect(tokenizeAndHumanizeLineColumn('<t\n>\r\na\r</t>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '1:0'],
-        [HtmlTokenType.TEXT, '1:1'], [HtmlTokenType.TAG_CLOSE, '2:1'],
-        [HtmlTokenType.EOF, '2:5']
+        [HtmlTokenType.TAG_OPEN_START, '0:0'],
+        [HtmlTokenType.TAG_OPEN_END, '1:0'],
+        [HtmlTokenType.TEXT, '1:1'],
+        [HtmlTokenType.TAG_CLOSE, '2:1'],
+        [HtmlTokenType.EOF, '2:5'],
       ]);
     });
   });
@@ -52,15 +58,19 @@ export function main() {
   describe('comments', () => {
     it('should parse comments', () => {
       expect(tokenizeAndHumanizeParts('<!--t\ne\rs\r\nt-->')).toEqual([
-        [HtmlTokenType.COMMENT_START], [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'],
-        [HtmlTokenType.COMMENT_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.COMMENT_START],
+        [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'],
+        [HtmlTokenType.COMMENT_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations',
        () => {expect(tokenizeAndHumanizeSourceSpans('<!--t\ne\rs\r\nt-->')).toEqual([
-         [HtmlTokenType.COMMENT_START, '<!--'], [HtmlTokenType.RAW_TEXT, 't\ne\rs\r\nt'],
-         [HtmlTokenType.COMMENT_END, '-->'], [HtmlTokenType.EOF, '']
+         [HtmlTokenType.COMMENT_START, '<!--'],
+         [HtmlTokenType.RAW_TEXT, 't\ne\rs\r\nt'],
+         [HtmlTokenType.COMMENT_END, '-->'],
+         [HtmlTokenType.EOF, ''],
       ])});

     it('should report <!- without -', () => {
@@ -77,15 +87,19 @@ export function main() {

     it('should accept comments finishing by too many dashes (even number)', () => {
       expect(tokenizeAndHumanizeSourceSpans('<!-- test ---->')).toEqual([
-        [HtmlTokenType.COMMENT_START, '<!--'], [HtmlTokenType.RAW_TEXT, ' test --'],
-        [HtmlTokenType.COMMENT_END, '-->'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.COMMENT_START, '<!--'],
+        [HtmlTokenType.RAW_TEXT, ' test --'],
+        [HtmlTokenType.COMMENT_END, '-->'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

     it('should accept comments finishing by too many dashes (odd number)', () => {
       expect(tokenizeAndHumanizeSourceSpans('<!-- test --->')).toEqual([
-        [HtmlTokenType.COMMENT_START, '<!--'], [HtmlTokenType.RAW_TEXT, ' test -'],
-        [HtmlTokenType.COMMENT_END, '-->'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.COMMENT_START, '<!--'],
+        [HtmlTokenType.RAW_TEXT, ' test -'],
+        [HtmlTokenType.COMMENT_END, '-->'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });
   });
@@ -93,13 +107,15 @@ export function main() {
   describe('doctype', () => {
     it('should parse doctypes', () => {
       expect(tokenizeAndHumanizeParts('<!doctype html>')).toEqual([
-        [HtmlTokenType.DOC_TYPE, 'doctype html'], [HtmlTokenType.EOF]
+        [HtmlTokenType.DOC_TYPE, 'doctype html'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('<!doctype html>')).toEqual([
-        [HtmlTokenType.DOC_TYPE, '<!doctype html>'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.DOC_TYPE, '<!doctype html>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -113,15 +129,19 @@ export function main() {
   describe('CDATA', () => {
     it('should parse CDATA', () => {
       expect(tokenizeAndHumanizeParts('<![CDATA[t\ne\rs\r\nt]]>')).toEqual([
-        [HtmlTokenType.CDATA_START], [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'],
-        [HtmlTokenType.CDATA_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.CDATA_START],
+        [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'],
+        [HtmlTokenType.CDATA_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('<![CDATA[t\ne\rs\r\nt]]>')).toEqual([
-        [HtmlTokenType.CDATA_START, '<![CDATA['], [HtmlTokenType.RAW_TEXT, 't\ne\rs\r\nt'],
-        [HtmlTokenType.CDATA_END, ']]>'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.CDATA_START, '<![CDATA['],
+        [HtmlTokenType.RAW_TEXT, 't\ne\rs\r\nt'],
+        [HtmlTokenType.CDATA_END, ']]>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -141,36 +161,41 @@ export function main() {
   describe('open tags', () => {
     it('should parse open tags without prefix', () => {
       expect(tokenizeAndHumanizeParts('<test>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'test'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse namespace prefix', () => {
       expect(tokenizeAndHumanizeParts('<ns1:test>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, 'ns1', 'test'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, 'ns1', 'test'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse void tags', () => {
       expect(tokenizeAndHumanizeParts('<test/>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END_VOID],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'test'],
+        [HtmlTokenType.TAG_OPEN_END_VOID],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should allow whitespace after the tag name', () => {
       expect(tokenizeAndHumanizeParts('<test >')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'test'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
      ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('<test>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '<test'], [HtmlTokenType.TAG_OPEN_END, '>'],
-        [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TAG_OPEN_START, '<test'],
+        [HtmlTokenType.TAG_OPEN_END, '>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -179,88 +204,134 @@ export function main() {
   describe('attributes', () => {
     it('should parse attributes without prefix', () => {
       expect(tokenizeAndHumanizeParts('<t a>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

+    it('should parse attributes with interpolation', () => {
+      expect(tokenizeAndHumanizeParts('<t a="{{v}}" b="s{{m}}e" c="s{{m//c}}e">')).toEqual([
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, '{{v}}'],
+        [HtmlTokenType.ATTR_NAME, null, 'b'],
+        [HtmlTokenType.ATTR_VALUE, 's{{m}}e'],
+        [HtmlTokenType.ATTR_NAME, null, 'c'],
+        [HtmlTokenType.ATTR_VALUE, 's{{m//c}}e'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
+      ]);
+    });
+
     it('should parse attributes with prefix', () => {
       expect(tokenizeAndHumanizeParts('<t ns1:a>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, 'ns1', 'a'],
-        [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, 'ns1', 'a'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes whose prefix is not valid', () => {
       expect(tokenizeAndHumanizeParts('<t (ns1:a)>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, '(ns1:a)'],
-        [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, '(ns1:a)'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes with single quote value', () => {
       expect(tokenizeAndHumanizeParts('<t a=\'b\'>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes with double quote value', () => {
       expect(tokenizeAndHumanizeParts('<t a="b">')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes with unquoted value', () => {
       expect(tokenizeAndHumanizeParts('<t a=b>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should allow whitespace', () => {
       expect(tokenizeAndHumanizeParts('<t a = b >')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes with entities in values', () => {
       expect(tokenizeAndHumanizeParts('<t a="&#65;&#x41;">')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'AA'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'AA'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should not decode entities without trailing ";"', () => {
       expect(tokenizeAndHumanizeParts('<t a="&amp" b="c&&d">')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, '&amp'], [HtmlTokenType.ATTR_NAME, null, 'b'],
-        [HtmlTokenType.ATTR_VALUE, 'c&&d'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, '&amp'],
+        [HtmlTokenType.ATTR_NAME, null, 'b'],
+        [HtmlTokenType.ATTR_VALUE, 'c&&d'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse attributes with "&" in values', () => {
       expect(tokenizeAndHumanizeParts('<t a="b && c &">')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b && c &'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b && c &'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse values with CR and LF', () => {
       expect(tokenizeAndHumanizeParts('<t a=\'t\ne\rs\r\nt\'>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 't\ne\ns\nt'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 't'],
+        [HtmlTokenType.ATTR_NAME, null, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 't\ne\ns\nt'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('<t a=b>')).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '<t'], [HtmlTokenType.ATTR_NAME, 'a'],
-        [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END, '>'],
-        [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TAG_OPEN_START, '<t'],
+        [HtmlTokenType.ATTR_NAME, 'a'],
+        [HtmlTokenType.ATTR_VALUE, 'b'],
+        [HtmlTokenType.TAG_OPEN_END, '>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -269,25 +340,29 @@ export function main() {
   describe('closing tags', () => {
     it('should parse closing tags without prefix', () => {
       expect(tokenizeAndHumanizeParts('</test>')).toEqual([
-        [HtmlTokenType.TAG_CLOSE, null, 'test'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_CLOSE, null, 'test'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse closing tags with prefix', () => {
       expect(tokenizeAndHumanizeParts('</ns1:test>')).toEqual([
-        [HtmlTokenType.TAG_CLOSE, 'ns1', 'test'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_CLOSE, 'ns1', 'test'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should allow whitespace', () => {
       expect(tokenizeAndHumanizeParts('</ test >')).toEqual([
-        [HtmlTokenType.TAG_CLOSE, null, 'test'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_CLOSE, null, 'test'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('</test>')).toEqual([
-        [HtmlTokenType.TAG_CLOSE, '</test>'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TAG_CLOSE, '</test>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -307,25 +382,29 @@ export function main() {
   describe('entities', () => {
     it('should parse named entities', () => {
       expect(tokenizeAndHumanizeParts('a&amp;b')).toEqual([
-        [HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'a&b'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse hexadecimal entities', () => {
       expect(tokenizeAndHumanizeParts('&#x41;&#X41;')).toEqual([
-        [HtmlTokenType.TEXT, 'AA'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'AA'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse decimal entities', () => {
       expect(tokenizeAndHumanizeParts('&#65;')).toEqual([
-        [HtmlTokenType.TEXT, 'A'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'A'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('a&amp;b')).toEqual([
-        [HtmlTokenType.TEXT, 'a&amp;b'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TEXT, 'a&amp;b'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -350,55 +429,57 @@ export function main() {
   describe('regular text', () => {
     it('should parse text', () => {
       expect(tokenizeAndHumanizeParts('a')).toEqual([
-        [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'a'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse interpolation', () => {
-      expect(tokenizeAndHumanizeParts('{{ a }}')).toEqual([
-        [HtmlTokenType.TEXT, '{{ a }}'], [HtmlTokenType.EOF]
-      ]);
-    });
-
-    it('should detect interpolation end', () => {
-      expect(tokenizeAndHumanizeParts('{{value|filter:{params: {strict: true}}}}')).toEqual([
-        [HtmlTokenType.TEXT, '{{ a }}'], [HtmlTokenType.EOF]
+      expect(tokenizeAndHumanizeParts('{{ a }}b{{ c // comment }}')).toEqual([
+        [HtmlTokenType.TEXT, '{{ a }}b{{ c // comment }}'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse interpolation with custom markers', () => {
       expect(tokenizeAndHumanizeParts('{% a %}', null, {start: '{%', end: '%}'})).toEqual([
-        [HtmlTokenType.TEXT, '{% a %}'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, '{% a %}'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should handle CR & LF', () => {
       expect(tokenizeAndHumanizeParts('t\ne\rs\r\nt')).toEqual([
-        [HtmlTokenType.TEXT, 't\ne\ns\nt'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 't\ne\ns\nt'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse entities', () => {
       expect(tokenizeAndHumanizeParts('a&amp;b')).toEqual([
-        [HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'a&b'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse text starting with "&"', () => {
       expect(tokenizeAndHumanizeParts('a && b &')).toEqual([
-        [HtmlTokenType.TEXT, 'a && b &'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'a && b &'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans('a')).toEqual([
-        [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TEXT, 'a'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

     it('should allow "<" in text nodes', () => {
       expect(tokenizeAndHumanizeParts('{{ a < b ? c : d }}')).toEqual([
-        [HtmlTokenType.TEXT, '{{ a < b ? c : d }}'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, '{{ a < b ? c : d }}'],
+        [HtmlTokenType.EOF],
       ]);

       expect(tokenizeAndHumanizeSourceSpans('<p>a<b</p>')).toEqual([
@@ -410,103 +491,124 @@ export function main() {
       ]);

       expect(tokenizeAndHumanizeParts('< a>')).toEqual([
-        [HtmlTokenType.TEXT, '< a>'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, '< a>'],
+        [HtmlTokenType.EOF],
       ]);
     });

     // TODO(vicb): make the lexer aware of Angular expressions
     // see https://github.com/angular/angular/issues/5679
     it('should parse valid start tag in interpolation', () => {
       expect(tokenizeAndHumanizeParts('{{ a <b && c > d }}')).toEqual([
-        [HtmlTokenType.TEXT, '{{ a '], [HtmlTokenType.TAG_OPEN_START, null, 'b'],
-        [HtmlTokenType.ATTR_NAME, null, '&&'], [HtmlTokenType.ATTR_NAME, null, 'c'],
-        [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.TEXT, ' d }}'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, '{{ a '],
+        [HtmlTokenType.TAG_OPEN_START, null, 'b'],
+        [HtmlTokenType.ATTR_NAME, null, '&&'],
+        [HtmlTokenType.ATTR_NAME, null, 'c'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.TEXT, ' d }}'],
+        [HtmlTokenType.EOF],
       ]);
     });

   });

   describe('raw text', () => {
     it('should parse text', () => {
       expect(tokenizeAndHumanizeParts(`<script>t\ne\rs\r\nt</script>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'script'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'],
+        [HtmlTokenType.TAG_CLOSE, null, 'script'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should not detect entities', () => {
       expect(tokenizeAndHumanizeParts(`<script>&amp;</SCRIPT>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.RAW_TEXT, '&amp;'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'script'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.RAW_TEXT, '&amp;'],
+        [HtmlTokenType.TAG_CLOSE, null, 'script'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should ignore other opening tags', () => {
       expect(tokenizeAndHumanizeParts(`<script>a<div></script>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.RAW_TEXT, 'a<div>'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'script'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.RAW_TEXT, 'a<div>'],
+        [HtmlTokenType.TAG_CLOSE, null, 'script'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should ignore other closing tags', () => {
       expect(tokenizeAndHumanizeParts(`<script>a</test></script>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.RAW_TEXT, 'a</test>'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'script'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.RAW_TEXT, 'a</test>'],
+        [HtmlTokenType.TAG_CLOSE, null, 'script'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans(`<script>a</script>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '<script'], [HtmlTokenType.TAG_OPEN_END, '>'],
-        [HtmlTokenType.RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, '</script>'],
-        [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TAG_OPEN_START, '<script'],
+        [HtmlTokenType.TAG_OPEN_END, '>'],
+        [HtmlTokenType.RAW_TEXT, 'a'],
+        [HtmlTokenType.TAG_CLOSE, '</script>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

   });

   describe('escapable raw text', () => {
     it('should parse text', () => {
       expect(tokenizeAndHumanizeParts(`<title>t\ne\rs\r\nt</title>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.TAG_OPEN_START, null, 'title'],
+        [HtmlTokenType.TAG_OPEN_END],
         [HtmlTokenType.ESCAPABLE_RAW_TEXT, 't\ne\ns\nt'],
-        [HtmlTokenType.TAG_CLOSE, null, 'title'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_CLOSE, null, 'title'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should detect entities', () => {
       expect(tokenizeAndHumanizeParts(`<title>&amp;</title>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.ESCAPABLE_RAW_TEXT, '&'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'title'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.ESCAPABLE_RAW_TEXT, '&'],
+        [HtmlTokenType.TAG_CLOSE, null, 'title'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should ignore other opening tags', () => {
       expect(tokenizeAndHumanizeParts(`<title>a<div></title>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a<div>'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'title'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a<div>'],
+        [HtmlTokenType.TAG_CLOSE, null, 'title'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should ignore other closing tags', () => {
       expect(tokenizeAndHumanizeParts(`<title>a</test></title>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a</test>'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.TAG_OPEN_START, null, 'title'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a</test>'],
+        [HtmlTokenType.TAG_CLOSE, null, 'title'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should store the locations', () => {
       expect(tokenizeAndHumanizeSourceSpans(`<title>a</title>`)).toEqual([
-        [HtmlTokenType.TAG_OPEN_START, '<title'], [HtmlTokenType.TAG_OPEN_END, '>'],
-        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, '</title>'],
-        [HtmlTokenType.EOF, '']
+        [HtmlTokenType.TAG_OPEN_START, '<title'],
+        [HtmlTokenType.TAG_OPEN_END, '>'],
+        [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'],
+        [HtmlTokenType.TAG_CLOSE, '</title>'],
+        [HtmlTokenType.EOF, ''],
       ]);
     });

@@ -516,65 +618,94 @@ export function main() {
     it('should parse an expansion form', () => {
       expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true))
          .toEqual([
-            [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
-            [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, '=5'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'five'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, 'foo'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'bar'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
-            [HtmlTokenType.EOF]
+            [HtmlTokenType.EXPANSION_FORM_START],
+            [HtmlTokenType.RAW_TEXT, 'one.two'],
+            [HtmlTokenType.RAW_TEXT, 'three'],
+            [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_START],
+            [HtmlTokenType.TEXT, 'four'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_END],
+            [HtmlTokenType.EXPANSION_CASE_VALUE, '=5'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_START],
+            [HtmlTokenType.TEXT, 'five'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_END],
+            [HtmlTokenType.EXPANSION_CASE_VALUE, 'foo'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_START],
+            [HtmlTokenType.TEXT, 'bar'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_END],
+            [HtmlTokenType.EXPANSION_FORM_END],
+            [HtmlTokenType.EOF],
          ]);
     });

     it('should parse an expansion form with text elements surrounding it', () => {
       expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([
-        [HtmlTokenType.TEXT, 'before'], [HtmlTokenType.EXPANSION_FORM_START],
-        [HtmlTokenType.RAW_TEXT, 'one.two'], [HtmlTokenType.RAW_TEXT, 'three'],
-        [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], [HtmlTokenType.EXPANSION_CASE_EXP_START],
-        [HtmlTokenType.TEXT, 'four'], [HtmlTokenType.EXPANSION_CASE_EXP_END],
-        [HtmlTokenType.EXPANSION_FORM_END], [HtmlTokenType.TEXT, 'after'], [HtmlTokenType.EOF]
+        [HtmlTokenType.TEXT, 'before'],
+        [HtmlTokenType.EXPANSION_FORM_START],
+        [HtmlTokenType.RAW_TEXT, 'one.two'],
+        [HtmlTokenType.RAW_TEXT, 'three'],
+        [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_START],
+        [HtmlTokenType.TEXT, 'four'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_END],
+        [HtmlTokenType.EXPANSION_FORM_END],
+        [HtmlTokenType.TEXT, 'after'],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse an expansion forms with elements in it', () => {
       expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four <b>a</b>}}', true)).toEqual([
-        [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
-        [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
-        [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four '],
-        [HtmlTokenType.TAG_OPEN_START, null, 'b'], [HtmlTokenType.TAG_OPEN_END],
-        [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, null, 'b'],
-        [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.EXPANSION_FORM_START],
+        [HtmlTokenType.RAW_TEXT, 'one.two'],
+        [HtmlTokenType.RAW_TEXT, 'three'],
+        [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_START],
+        [HtmlTokenType.TEXT, 'four '],
+        [HtmlTokenType.TAG_OPEN_START, null, 'b'],
+        [HtmlTokenType.TAG_OPEN_END],
+        [HtmlTokenType.TEXT, 'a'],
+        [HtmlTokenType.TAG_CLOSE, null, 'b'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_END],
+        [HtmlTokenType.EXPANSION_FORM_END],
+        [HtmlTokenType.EOF],
       ]);
     });

-    it('should parse an expansion forms with interpolation in it', () => {
+    it('should parse an expansion forms containing an interpolation', () => {
       expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([
-        [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
-        [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
-        [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four {{a}}'],
-        [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
-        [HtmlTokenType.EOF]
+        [HtmlTokenType.EXPANSION_FORM_START],
+        [HtmlTokenType.RAW_TEXT, 'one.two'],
+        [HtmlTokenType.RAW_TEXT, 'three'],
+        [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_START],
+        [HtmlTokenType.TEXT, 'four {{a}}'],
+        [HtmlTokenType.EXPANSION_CASE_EXP_END],
+        [HtmlTokenType.EXPANSION_FORM_END],
+        [HtmlTokenType.EOF],
       ]);
     });

     it('should parse nested expansion forms', () => {
       expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true))
          .toEqual([
-            [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
-            [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
+            [HtmlTokenType.EXPANSION_FORM_START],
+            [HtmlTokenType.RAW_TEXT, 'one.two'],
+            [HtmlTokenType.RAW_TEXT, 'three'],
+            [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
             [HtmlTokenType.EXPANSION_CASE_EXP_START],

-            [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'xx'],
-            [HtmlTokenType.RAW_TEXT, 'yy'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=x'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'one'],
-            [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
+            [HtmlTokenType.EXPANSION_FORM_START],
+            [HtmlTokenType.RAW_TEXT, 'xx'],
+            [HtmlTokenType.RAW_TEXT, 'yy'],
+            [HtmlTokenType.EXPANSION_CASE_VALUE, '=x'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_START],
+            [HtmlTokenType.TEXT, 'one'],
+            [HtmlTokenType.EXPANSION_CASE_EXP_END],
+            [HtmlTokenType.EXPANSION_FORM_END],
             [HtmlTokenType.TEXT, ' '],

-            [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
-            [HtmlTokenType.EOF]
+            [HtmlTokenType.EXPANSION_CASE_EXP_END],
+            [HtmlTokenType.EXPANSION_FORM_END],
+            [HtmlTokenType.EOF],
          ]);
     });
   });
@@ -594,8 +725,11 @@ export function main() {
describe('unicode characters', () => {
it('should support unicode characters', () => {
expect(tokenizeAndHumanizeSourceSpans(`<p>İ</p>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, '<p'], [HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.TEXT, 'İ'], [HtmlTokenType.TAG_CLOSE, '</p>'], [HtmlTokenType.EOF, '']
[HtmlTokenType.TAG_OPEN_START, '<p'],
[HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.TEXT, 'İ'],
[HtmlTokenType.TAG_CLOSE, '</p>'],
[HtmlTokenType.EOF, ''],
]);
});
});
@@ -606,8 +740,7 @@ export function main() {
function tokenizeWithoutErrors(
input: string, tokenizeExpansionForms: boolean = false,
interpolationConfig?: InterpolationConfig): HtmlToken[] {
var tokenizeResult = tokenizeHtml(
input, 'someUrl', _getExpressionParser(), tokenizeExpansionForms, interpolationConfig);
var tokenizeResult = tokenizeHtml(input, 'someUrl', tokenizeExpansionForms, interpolationConfig);

if (tokenizeResult.errors.length > 0) {
const errorString = tokenizeResult.errors.join('\n');
@@ -638,10 +771,6 @@ function tokenizeAndHumanizeLineColumn(input: string): any[] {
}

function tokenizeAndHumanizeErrors(input: string): any[] {
return tokenizeHtml(input, 'someUrl', _getExpressionParser())
return tokenizeHtml(input, 'someUrl')
.errors.map(e => [<any>e.tokenType, e.msg, humanizeLineColumn(e.span.start)]);
}

function _getExpressionParser(): ExpressionParser {
return new ExpressionParser(new ExpressionLexer());
}
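Note: taken together, the two hunks above leave the lexer spec's helpers looking roughly like the sketch below. The tokenizeHtml call, the InterpolationConfig parameter, and the .errors check are copied from the diff; the import paths, the error-throwing behaviour, and the final return of the token array are assumptions about parts of the spec file this diff does not show.

// Sketch only, not part of this commit's diff.
import {HtmlToken, tokenizeHtml} from '@angular/compiler/src/html_lexer';  // assumed path
import {InterpolationConfig} from '@angular/compiler/src/interpolation_config';  // assumed path

function tokenizeWithoutErrors(
    input: string, tokenizeExpansionForms: boolean = false,
    interpolationConfig?: InterpolationConfig): HtmlToken[] {
  // The _getExpressionParser() argument is gone: the lexer now finds
  // interpolation boundaries on its own, driven only by interpolationConfig.
  var tokenizeResult = tokenizeHtml(input, 'someUrl', tokenizeExpansionForms, interpolationConfig);
  if (tokenizeResult.errors.length > 0) {
    const errorString = tokenizeResult.errors.join('\n');
    throw new Error(`Unexpected parse errors:\n${errorString}`);  // assumed handling
  }
  return tokenizeResult.tokens;  // assumed: the result also carries the tokens
}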
@@ -6,8 +6,6 @@
* found in the LICENSE file at https://angular.io/license
*/

import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer';
import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser';
import {HtmlAttrAst, HtmlCommentAst, HtmlElementAst, HtmlExpansionAst, HtmlExpansionCaseAst, HtmlTextAst} from '@angular/compiler/src/html_ast';
import {HtmlTokenType} from '@angular/compiler/src/html_lexer';
import {HtmlParseTreeResult, HtmlParser, HtmlTreeError} from '@angular/compiler/src/html_parser';
@@ -19,14 +17,8 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './html_as
export function main() {
describe('HtmlParser', () => {
var parser: HtmlParser;
var expLexer: ExpressionLexer;
var expParser: ExpressionParser;

beforeEach(() => {
expLexer = new ExpressionLexer();
expParser = new ExpressionParser(expLexer);
parser = new HtmlParser(expParser);
});
beforeEach(() => { parser = new HtmlParser(); });

describe('parse', () => {
describe('text nodes', () => {
@@ -6,8 +6,6 @@
* found in the LICENSE file at https://angular.io/license
*/

import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer';
import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser';
import {HtmlAttrAst, HtmlElementAst, HtmlTextAst} from '@angular/compiler/src/html_ast';
import {HtmlParser} from '@angular/compiler/src/html_parser';
import {ExpansionResult, expandNodes} from '@angular/compiler/src/i18n/expander';
@@ -18,9 +16,7 @@ import {ddescribe, describe, expect, iit, it} from '@angular/core/testing/testin
export function main() {
describe('Expander', () => {
function expand(template: string): ExpansionResult {
const expLexer = new ExpressionLexer();
const expParser = new ExpressionParser(expLexer);
const htmlParser = new HtmlParser(expParser);
const htmlParser = new HtmlParser();
const res = htmlParser.parse(template, 'url', true);
return expandNodes(res.rootNodes);
}
@@ -26,14 +26,15 @@ export function main() {
template: string, messages: {[key: string]: string}, implicitTags: string[] = [],
implicitAttrs: {[k: string]: string[]} = {},
interpolation?: InterpolationConfig): HtmlParseTreeResult {
var expParser = new ExpressionParser(new ExpressionLexer());
let htmlParser = new HtmlParser(expParser);
let htmlParser = new HtmlParser();

let msgs = '';
StringMapWrapper.forEach(
messages, (v: string, k: string) => msgs += `<msg id="${k}">${v}</msg>`);
let res = deserializeXmb(`<message-bundle>${msgs}</message-bundle>`, 'someUrl');

const expParser = new ExpressionParser(new ExpressionLexer());

return new I18nHtmlParser(
htmlParser, expParser, res.content, res.messages, implicitTags, implicitAttrs)
.parse(template, 'someurl', true, interpolation);
@@ -20,7 +20,7 @@ export function main() {

beforeEach(() => {
const expParser = new ExpressionParser(new ExpressionLexer());
const htmlParser = new HtmlParser(expParser);
const htmlParser = new HtmlParser();
// TODO: pass expression parser
extractor = new MessageExtractor(htmlParser, expParser, ['i18n-tag'], {'i18n-el': ['trans']});
});
@@ -6,8 +6,6 @@
* found in the LICENSE file at https://angular.io/license
*/

import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer';
import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser';
import {HtmlElementAst} from '@angular/compiler/src/html_ast';
import {HtmlParser} from '@angular/compiler/src/html_parser';
import {DomElementSchemaRegistry} from '@angular/compiler/src/schema/dom_element_schema_registry';
@@ -70,10 +68,8 @@ export function main() {
});

it('should detect properties on namespaced elements', () => {
const expLexer = new ExpressionLexer();
const expParser = new ExpressionParser(expLexer);
let htmlAst = new HtmlParser(expParser).parse('<svg:style>', 'TestComp');
let nodeName = (<HtmlElementAst>htmlAst.rootNodes[0]).name;
const htmlAst = new HtmlParser().parse('<svg:style>', 'TestComp');
const nodeName = (<HtmlElementAst>htmlAst.rootNodes[0]).name;
expect(registry.hasProperty(nodeName, 'type')).toBeTruthy();
});
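Every call-site change in this commit reduces to the same pattern. A minimal sketch, assuming only the constructor and parse signatures visible in the hunks above (the template string and the logged fields are illustrative):

// Sketch only, not part of this commit's diff.
import {HtmlParser} from '@angular/compiler/src/html_parser';

// Before: call sites built an expression parser just to construct HtmlParser:
//   const expParser = new ExpressionParser(new ExpressionLexer());
//   const htmlParser = new HtmlParser(expParser);
// After: HtmlParser stands alone; an ExpressionParser is created only where
// expressions are actually parsed (e.g. TemplateParser, I18nHtmlParser).
const htmlParser = new HtmlParser();
const result = htmlParser.parse('<p>{{greeting}}</p>', 'someUrl');
console.log(result.rootNodes.length, result.errors.length);  // HtmlParseTreeResult fields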