;" syntax', '0:0'
]]);
expect(tokenizeAndHumanizeErrors('&#asdf;')).toEqual([
[HtmlTokenType.TEXT, 'Unexpected character "s"', '0:3']
]);
expect(tokenizeAndHumanizeErrors('&#xasdf;')).toEqual([
[HtmlTokenType.TEXT, 'Unexpected character "s"', '0:4']
]);
expect(tokenizeAndHumanizeErrors('&#xABC')).toEqual([
[HtmlTokenType.TEXT, 'Unexpected character "EOF"', '0:6']
]);
});
});
describe('regular text', () => {
it('should parse text', () => {
expect(tokenizeAndHumanizeParts('a')).toEqual([
[HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF]
]);
});
it('should parse interpolation', () => {
expect(tokenizeAndHumanizeParts('{{ a }}')).toEqual([
[HtmlTokenType.TEXT, '{{ a }}'], [HtmlTokenType.EOF]
]);
expect(tokenizeAndHumanizeParts('{% a %}', null, {start: '{%', end: '%}'})).toEqual([
[HtmlTokenType.TEXT, '{% a %}'], [HtmlTokenType.EOF]
]);
});
it('should handle CR & LF', () => {
expect(tokenizeAndHumanizeParts('t\ne\rs\r\nt')).toEqual([
[HtmlTokenType.TEXT, 't\ne\ns\nt'], [HtmlTokenType.EOF]
]);
});
it('should parse entities', () => {
expect(tokenizeAndHumanizeParts('a&amp;b')).toEqual([
[HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF]
]);
});
it('should parse text starting with "&"', () => {
expect(tokenizeAndHumanizeParts('a && b &')).toEqual([
[HtmlTokenType.TEXT, 'a && b &'], [HtmlTokenType.EOF]
]);
});
it('should store the locations', () => {
expect(tokenizeAndHumanizeSourceSpans('a')).toEqual([
[HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF, '']
]);
});
it('should allow "<" in text nodes', () => {
expect(tokenizeAndHumanizeParts('{{ a < b ? c : d }}')).toEqual([
[HtmlTokenType.TEXT, '{{ a < b ? c : d }}'], [HtmlTokenType.EOF]
]);
expect(tokenizeAndHumanizeSourceSpans('<p>a<b</p>')).toEqual([
[HtmlTokenType.TAG_OPEN_START, '<p'],
[HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.TEXT, 'a<b'],
[HtmlTokenType.TAG_CLOSE, '</p>'],
[HtmlTokenType.EOF, ''],
]);
expect(tokenizeAndHumanizeParts('< a>')).toEqual([
[HtmlTokenType.TEXT, '< a>'], [HtmlTokenType.EOF]
]);
});
// TODO(vicb): make the lexer aware of Angular expressions
// see https://github.com/angular/angular/issues/5679
it('should parse valid start tag in interpolation', () => {
expect(tokenizeAndHumanizeParts('{{ a <b && c > d }}')).toEqual([
[HtmlTokenType.TEXT, '{{ a '], [HtmlTokenType.TAG_OPEN_START, null, 'b'],
[HtmlTokenType.ATTR_NAME, null, '&&'], [HtmlTokenType.ATTR_NAME, null, 'c'],
[HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.TEXT, ' d }}'], [HtmlTokenType.EOF]
]);
});
});
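// <script> is a raw text element: as the tests below show, its content is
// emitted as a single RAW_TEXT token, with no entity decoding and no parsing
// of nested tags.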
describe('raw text', () => {
it('should parse text', () => {
expect(tokenizeAndHumanizeParts(`<script>t\ne\rs\r\nt</script>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
[HtmlTokenType.EOF]
]);
});
it('should not detect entities', () => {
expect(tokenizeAndHumanizeParts(`<script>&amp;</SCRIPT>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.RAW_TEXT, '&amp;'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
[HtmlTokenType.EOF]
]);
});
it('should ignore other opening tags', () => {
expect(tokenizeAndHumanizeParts(`<script>a<div></script>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.RAW_TEXT, 'a<div>'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
[HtmlTokenType.EOF]
]);
});
it('should ignore other closing tags', () => {
expect(tokenizeAndHumanizeParts(`<script>a</test></script>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.RAW_TEXT, 'a</test>'], [HtmlTokenType.TAG_CLOSE, null, 'script'],
[HtmlTokenType.EOF]
]);
});
it('should store the locations', () => {
expect(tokenizeAndHumanizeSourceSpans(`<script>a</script>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, '<script'], [HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, '</script>'],
[HtmlTokenType.EOF, '']
]);
});
});
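// <title> is an "escapable" raw text element: entities inside it are decoded,
// but tags are still treated as plain text (see the cases below).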
describe('escapable raw text', () => {
it('should parse text', () => {
expect(tokenizeAndHumanizeParts(`<title>t\ne\rs\r\nt</title>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.ESCAPABLE_RAW_TEXT, 't\ne\ns\nt'],
[HtmlTokenType.TAG_CLOSE, null, 'title'], [HtmlTokenType.EOF]
]);
});
it('should detect entities', () => {
expect(tokenizeAndHumanizeParts(`<title>&amp;</title>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.ESCAPABLE_RAW_TEXT, '&'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
[HtmlTokenType.EOF]
]);
});
it('should ignore other opening tags', () => {
expect(tokenizeAndHumanizeParts(`<title>a<div></title>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a<div>'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
[HtmlTokenType.EOF]
]);
});
it('should ignore other closing tags', () => {
expect(tokenizeAndHumanizeParts(`<title>a</test></title>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a</test>'], [HtmlTokenType.TAG_CLOSE, null, 'title'],
[HtmlTokenType.EOF]
]);
});
it('should store the locations', () => {
expect(tokenizeAndHumanizeSourceSpans(`<title>a</title>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, '<title'], [HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, '</title>'],
[HtmlTokenType.EOF, '']
]);
});
});
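// Expansion forms are ICU-like messages of the shape
// `{switchValue, type, =case {text} ...}`; they are only tokenized when the
// tokenizeExpansionForms flag is passed as true.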
describe('expansion forms', () => {
it('should parse an expansion form', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true))
.toEqual([
[HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
[HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, '=5'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'five'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, 'foo'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'bar'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
[HtmlTokenType.EOF]
]);
});
it('should parse an expansion form with text elements surrounding it', () => {
expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([
[HtmlTokenType.TEXT, 'before'], [HtmlTokenType.EXPANSION_FORM_START],
[HtmlTokenType.RAW_TEXT, 'one.two'], [HtmlTokenType.RAW_TEXT, 'three'],
[HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], [HtmlTokenType.EXPANSION_CASE_EXP_START],
[HtmlTokenType.TEXT, 'four'], [HtmlTokenType.EXPANSION_CASE_EXP_END],
[HtmlTokenType.EXPANSION_FORM_END], [HtmlTokenType.TEXT, 'after'], [HtmlTokenType.EOF]
]);
});
it('should parse an expansion form with elements in it', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four <b>a</b>}}', true)).toEqual([
[HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
[HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four '],
[HtmlTokenType.TAG_OPEN_START, null, 'b'], [HtmlTokenType.TAG_OPEN_END],
[HtmlTokenType.TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, null, 'b'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
[HtmlTokenType.EOF]
]);
});
it('should parse an expansion form with interpolation in it', () => {
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([
[HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
[HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four {{a}}'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
[HtmlTokenType.EOF]
]);
});
it('should parse nested expansion forms', () => {
expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true))
.toEqual([
[HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'],
[HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'],
[HtmlTokenType.EXPANSION_CASE_EXP_START],
[HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'xx'],
[HtmlTokenType.RAW_TEXT, 'yy'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=x'],
[HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'one'],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
[HtmlTokenType.TEXT, ' '],
[HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END],
[HtmlTokenType.EOF]
]);
});
});
describe('errors', () => {
it('should include 2 lines of context in message', () => {
let src = '111\n222\n333\nE\n444\n555\n666\n';
let file = new ParseSourceFile(src, 'file://');
let location = new ParseLocation(file, 12, 123, 456);
let span = new ParseSourceSpan(location, location);
let error = new HtmlTokenError('**ERROR**', null, span);
expect(error.toString())
.toEqual(`**ERROR** ("\n222\n333\n[ERROR ->]E\n444\n555\n"): file://@123:456`);
});
});
describe('unicode characters', () => {
it('should support unicode characters', () => {
expect(tokenizeAndHumanizeSourceSpans(`<p>İ</p>`)).toEqual([
[HtmlTokenType.TAG_OPEN_START, '<p'], [HtmlTokenType.TAG_OPEN_END, '>'],
[HtmlTokenType.TEXT, 'İ'], [HtmlTokenType.TAG_CLOSE, '</p>'],
[HtmlTokenType.EOF, '']
]);
});
});
});
}
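// Tokenizes the input and throws if the lexer reported any errors, so tests
// fail fast on unexpected parse errors.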
function tokenizeWithoutErrors(
input: string, tokenizeExpansionForms: boolean = false,
interpolationConfig?: InterpolationConfig): HtmlToken[] {
var tokenizeResult = tokenizeHtml(input, 'someUrl', tokenizeExpansionForms, interpolationConfig);
if (tokenizeResult.errors.length > 0) {
var errorString = tokenizeResult.errors.join('\n');
throw new BaseException(`Unexpected parse errors:\n${errorString}`);
}
return tokenizeResult.tokens;
}
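// Humanizes each token as a [type, ...parts] tuple for concise assertions.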
function tokenizeAndHumanizeParts(
input: string, tokenizeExpansionForms: boolean = false,
interpolationConfig?: InterpolationConfig): any[] {
return tokenizeWithoutErrors(input, tokenizeExpansionForms, interpolationConfig)
.map(token => [<any>token.type].concat(token.parts));
}
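// Humanizes each token as a [type, sourceSpan] tuple to assert on locations.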
function tokenizeAndHumanizeSourceSpans(input: string): any[] {
return tokenizeWithoutErrors(input).map(token => [token.type, token.sourceSpan.toString()]);
}
function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`;
}
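// Humanizes each token as a [type, 'line:col'] tuple based on the start of
// its source span.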
function tokenizeAndHumanizeLineColumn(input: string): any[] {
return tokenizeWithoutErrors(input).map(
token => [token.type, humanizeLineColumn(token.sourceSpan.start)]);
}
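// Unlike the helpers above, this one collects the lexer errors (not the
// tokens), humanizing each as [tokenType, msg, 'line:col'].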
function tokenizeAndHumanizeErrors(input: string): any[] {
return tokenizeHtml(input, 'someUrl').errors.map(
tokenError => [<any>tokenError.tokenType, tokenError.msg,
humanizeLineColumn(tokenError.span.start)]);
}