/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {describe, expect, it} from '../../../core/testing/testing_internal';
import {CssLexer, CssLexerMode, CssToken, CssTokenType, cssScannerError, getRawMessage, getToken} from '../../src/css_parser/css_lexer';
import {isPresent} from '../../src/facade/lang';
export function main() {
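  // Shared helper for the specs below: lexes `code` in the given CssLexerMode
  // (optionally keeping comment tokens) and collects every token, rethrowing
  // the first scanner error it encounters.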
  function tokenize(
      code: string, trackComments: boolean = false,
      mode: CssLexerMode = CssLexerMode.ALL): CssToken[] {
    const scanner = new CssLexer().scan(code, trackComments);
    scanner.setMode(mode);

    const tokens: CssToken[] = [];
    let output = scanner.scan();
    while (output != null) {
      const error = output.error;
      if (isPresent(error)) {
        throw cssScannerError(getToken(error), getRawMessage(error));
      }
      tokens.push(output.token);
      output = scanner.scan();
    }

    return tokens;
  }

  describe('CssLexer', () => {
    it('should lex newline characters as whitespace when whitespace mode is on', () => {
      const newlines = ['\n', '\r\n', '\r', '\f'];
      newlines.forEach((line) => {
        const token = tokenize(line, false, CssLexerMode.ALL_TRACK_WS)[0];
        expect(token.type).toEqual(CssTokenType.Whitespace);
      });
    });

    it('should combine newline characters into one whitespace token when whitespace mode is on', () => {
      const newlines = ['\n', '\r\n', '\r', '\f'].join('');
      const tokens = tokenize(newlines, false, CssLexerMode.ALL_TRACK_WS);
      expect(tokens.length).toEqual(1);
      expect(tokens[0].type).toEqual(CssTokenType.Whitespace);
    });

    it('should not consider whitespace or newline values at all when whitespace mode is off',
       () => {
         const newlines = ['\n', '\r\n', '\r', '\f'].join('');
         const tokens = tokenize(newlines);
         expect(tokens.length).toEqual(0);
       });

    it('should lex simple selectors and their inner properties', () => {
      const cssCode = '\n' +
          ' .selector { my-prop: my-value; }\n';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Character);
      expect(tokens[0].strValue).toEqual('.');

      expect(tokens[1].type).toEqual(CssTokenType.Identifier);
      expect(tokens[1].strValue).toEqual('selector');

      expect(tokens[2].type).toEqual(CssTokenType.Character);
      expect(tokens[2].strValue).toEqual('{');

      expect(tokens[3].type).toEqual(CssTokenType.Identifier);
      expect(tokens[3].strValue).toEqual('my-prop');

      expect(tokens[4].type).toEqual(CssTokenType.Character);
      expect(tokens[4].strValue).toEqual(':');

      expect(tokens[5].type).toEqual(CssTokenType.Identifier);
      expect(tokens[5].strValue).toEqual('my-value');

      expect(tokens[6].type).toEqual(CssTokenType.Character);
      expect(tokens[6].strValue).toEqual(';');

      expect(tokens[7].type).toEqual(CssTokenType.Character);
      expect(tokens[7].strValue).toEqual('}');
    });

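    // Note: the line and column values reported on each token are zero-based.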
    it('should capture the column and line values for each token', () => {
      const cssCode = '#id {\n' +
          '  prop:value;\n' +
          '}';

      const tokens = tokenize(cssCode);

      // #
      expect(tokens[0].type).toEqual(CssTokenType.Character);
      expect(tokens[0].column).toEqual(0);
      expect(tokens[0].line).toEqual(0);

      // id
      expect(tokens[1].type).toEqual(CssTokenType.Identifier);
      expect(tokens[1].column).toEqual(1);
      expect(tokens[1].line).toEqual(0);

      // {
      expect(tokens[2].type).toEqual(CssTokenType.Character);
      expect(tokens[2].column).toEqual(4);
      expect(tokens[2].line).toEqual(0);

      // prop
      expect(tokens[3].type).toEqual(CssTokenType.Identifier);
      expect(tokens[3].column).toEqual(2);
      expect(tokens[3].line).toEqual(1);

      // :
      expect(tokens[4].type).toEqual(CssTokenType.Character);
      expect(tokens[4].column).toEqual(6);
      expect(tokens[4].line).toEqual(1);

      // value
      expect(tokens[5].type).toEqual(CssTokenType.Identifier);
      expect(tokens[5].column).toEqual(7);
      expect(tokens[5].line).toEqual(1);

      // ;
      expect(tokens[6].type).toEqual(CssTokenType.Character);
      expect(tokens[6].column).toEqual(12);
      expect(tokens[6].line).toEqual(1);

      // }
      expect(tokens[7].type).toEqual(CssTokenType.Character);
      expect(tokens[7].column).toEqual(0);
      expect(tokens[7].line).toEqual(2);
    });

    it('should lex quoted strings and escape accordingly', () => {
      const cssCode = 'prop: \'some { value } \\\' that is quoted\'';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Identifier);
      expect(tokens[1].type).toEqual(CssTokenType.Character);
      expect(tokens[2].type).toEqual(CssTokenType.String);
      expect(tokens[2].strValue).toEqual('\'some { value } \\\' that is quoted\'');
    });

    it('should treat attribute operators as regular characters', () => {
      tokenize('^|~+*').forEach((token) => { expect(token.type).toEqual(CssTokenType.Character); });
    });

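    // The sign of a negative value is lexed as part of the number token itself
    // (see the '-2' and '-4.001' assertions below).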
    it('should lex numbers properly and set them as numbers', () => {
      const cssCode = '0 1 -2 3.0 -4.001';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Number);
      expect(tokens[0].strValue).toEqual('0');

      expect(tokens[1].type).toEqual(CssTokenType.Number);
      expect(tokens[1].strValue).toEqual('1');

      expect(tokens[2].type).toEqual(CssTokenType.Number);
      expect(tokens[2].strValue).toEqual('-2');

      expect(tokens[3].type).toEqual(CssTokenType.Number);
      expect(tokens[3].strValue).toEqual('3.0');

      expect(tokens[4].type).toEqual(CssTokenType.Number);
      expect(tokens[4].strValue).toEqual('-4.001');
    });

    it('should lex @keywords', () => {
      const cssCode = '@import()@something';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.AtKeyword);
      expect(tokens[0].strValue).toEqual('@import');

      expect(tokens[1].type).toEqual(CssTokenType.Character);
      expect(tokens[1].strValue).toEqual('(');

      expect(tokens[2].type).toEqual(CssTokenType.Character);
      expect(tokens[2].strValue).toEqual(')');

      expect(tokens[3].type).toEqual(CssTokenType.AtKeyword);
      expect(tokens[3].strValue).toEqual('@something');
    });

    it('should still lex a number even if it has a dimension suffix', () => {
      const cssCode = '40% is 40 percent';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Number);
      expect(tokens[0].strValue).toEqual('40');

      expect(tokens[1].type).toEqual(CssTokenType.Character);
      expect(tokens[1].strValue).toEqual('%');

      expect(tokens[2].type).toEqual(CssTokenType.Identifier);
      expect(tokens[2].strValue).toEqual('is');

      expect(tokens[3].type).toEqual(CssTokenType.Number);
      expect(tokens[3].strValue).toEqual('40');
    });

    it('should allow escaped character and unicode character-strings in CSS selectors', () => {
      const cssCode = '\\123456 .some\\thing \{\}';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Identifier);
      expect(tokens[0].strValue).toEqual('\\123456');

      expect(tokens[1].type).toEqual(CssTokenType.Character);
      expect(tokens[2].type).toEqual(CssTokenType.Identifier);
      expect(tokens[2].strValue).toEqual('some\\thing');
    });

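    // Dashes and underscores that appear inside an identifier stay part of that
    // identifier (e.g. 'three-4-equals_value' below), whereas the '-4' that follows
    // '=' is lexed as a number token.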
    it('should distinguish identifiers and numbers from special characters', () => {
      const cssCode = 'one*two=-4+three-4-equals_value$';
      const tokens = tokenize(cssCode);

      expect(tokens[0].type).toEqual(CssTokenType.Identifier);
      expect(tokens[0].strValue).toEqual('one');

      expect(tokens[1].type).toEqual(CssTokenType.Character);
      expect(tokens[1].strValue).toEqual('*');

      expect(tokens[2].type).toEqual(CssTokenType.Identifier);
      expect(tokens[2].strValue).toEqual('two');

      expect(tokens[3].type).toEqual(CssTokenType.Character);
      expect(tokens[3].strValue).toEqual('=');

      expect(tokens[4].type).toEqual(CssTokenType.Number);
      expect(tokens[4].strValue).toEqual('-4');

      expect(tokens[5].type).toEqual(CssTokenType.Character);
      expect(tokens[5].strValue).toEqual('+');

      expect(tokens[6].type).toEqual(CssTokenType.Identifier);
      expect(tokens[6].strValue).toEqual('three-4-equals_value');

      expect(tokens[7].type).toEqual(CssTokenType.Character);
      expect(tokens[7].strValue).toEqual('$');
    });

    it('should filter out comments and whitespace by default', () => {
      const cssCode = '.selector /* comment */ { /* value */ }';
      const tokens = tokenize(cssCode);

      expect(tokens[0].strValue).toEqual('.');
      expect(tokens[1].strValue).toEqual('selector');
      expect(tokens[2].strValue).toEqual('{');
      expect(tokens[3].strValue).toEqual('}');
    });

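    // With trackComments enabled (and whitespace tracking switched on via
    // ALL_TRACK_WS), comment and whitespace tokens are emitted in between the
    // regular tokens instead of being dropped.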
    it('should track comments when the flag is set to true', () => {
      const cssCode = '.selector /* comment */ { /* value */ }';
      const trackComments = true;
      const tokens = tokenize(cssCode, trackComments, CssLexerMode.ALL_TRACK_WS);

      expect(tokens[0].strValue).toEqual('.');
      expect(tokens[1].strValue).toEqual('selector');
      expect(tokens[2].strValue).toEqual(' ');

      expect(tokens[3].type).toEqual(CssTokenType.Comment);
      expect(tokens[3].strValue).toEqual('/* comment */');

      expect(tokens[4].strValue).toEqual(' ');
      expect(tokens[5].strValue).toEqual('{');
      expect(tokens[6].strValue).toEqual(' ');

      expect(tokens[7].type).toEqual(CssTokenType.Comment);
      expect(tokens[7].strValue).toEqual('/* value */');
    });

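    // STYLE_BLOCK mode rejects the '>' combinator, whereas SELECTOR mode accepts it.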
    describe('Selector Mode', () => {
      it('should throw an error if a selector is being parsed while in the wrong mode', () => {
        const cssCode = '.class > tag';

        let capturedMessage: string;
        try {
          tokenize(cssCode, false, CssLexerMode.STYLE_BLOCK);
        } catch (e) {
          capturedMessage = getRawMessage(e);
        }

        expect(capturedMessage).toMatch(/Unexpected character \[\>\] at column 0:7 in expression/g);
        capturedMessage = null;

        try {
          tokenize(cssCode, false, CssLexerMode.SELECTOR);
        } catch (e) {
          capturedMessage = getRawMessage(e);
        }

        expect(capturedMessage).toEqual(null);
      });
    });

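    // Each valid modifier below yields 4 tokens for value<modifier>='something'
    // (and 3 tokens when no modifier is used); '+' is not a valid modifier and throws.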
    describe('Attribute Mode', () => {
      it('should consider attribute selectors as valid input and throw when an invalid modifier is used',
         () => {
           function tokenizeAttr(modifier: string) {
             const cssCode = 'value' + modifier + '=\'something\'';
             return tokenize(cssCode, false, CssLexerMode.ATTRIBUTE_SELECTOR);
           }

           expect(tokenizeAttr('*').length).toEqual(4);
           expect(tokenizeAttr('|').length).toEqual(4);
           expect(tokenizeAttr('^').length).toEqual(4);
           expect(tokenizeAttr('$').length).toEqual(4);
           expect(tokenizeAttr('~').length).toEqual(4);
           expect(tokenizeAttr('').length).toEqual(3);

           expect(() => { tokenizeAttr('+'); }).toThrow();
         });
    });

    describe('Media Query Mode', () => {
      it('should validate media queries with a reduced subset of valid characters', () => {
        function tokenizeQuery(code: string) {
          return tokenize(code, false, CssLexerMode.MEDIA_QUERY);
        }

        // the counts are high because media query mode keeps track of the
        // whitespace values
        expect(tokenizeQuery('(prop: value)').length).toEqual(5);
        expect(tokenizeQuery('(prop: value) and (prop2: value2)').length).toEqual(11);
        expect(tokenizeQuery('tv and (prop: value)').length).toEqual(7);
        expect(tokenizeQuery('print and ((prop: value) or (prop2: value2))').length).toEqual(15);
        expect(tokenizeQuery('(content: \'something $ crazy inside &\')').length).toEqual(5);

        expect(() => { tokenizeQuery('(max-height: 10 + 20)'); }).toThrow();

        expect(() => { tokenizeQuery('(max-height: fifty < 100)'); }).toThrow();
      });
    });

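    // With arguments enabled, lang(en-us) lexes into 4 tokens; argument lists
    // containing ':' or '.' (as in the cases below) make the lexer throw.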
    describe('Pseudo Selector Mode', () => {
      it('should validate pseudo selector identifiers with a reduced subset of valid characters',
         () => {
           function tokenizePseudo(code: string, withArgs = false): CssToken[] {
             const mode = withArgs ? CssLexerMode.PSEUDO_SELECTOR_WITH_ARGUMENTS :
                                     CssLexerMode.PSEUDO_SELECTOR;
             return tokenize(code, false, mode);
           }

           expect(tokenizePseudo('hover').length).toEqual(1);
           expect(tokenizePseudo('focus').length).toEqual(1);
           expect(tokenizePseudo('lang(en-us)', true).length).toEqual(4);

           expect(() => { tokenizePseudo('lang(something:broken)', true); }).toThrow();

           expect(() => { tokenizePseudo('not(.selector)', true); }).toThrow();
         });
    });

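    // The style block below lexes into 14 tokens: 4 for each of the first two
    // declarations and 6 for the third, where '!important' presumably contributes
    // a '!' character token plus an identifier.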
    describe('Style Block Mode', () => {
      it('should validate style blocks with a reduced subset of valid characters', () => {
        function tokenizeStyles(code: string) {
          return tokenize(code, false, CssLexerMode.STYLE_BLOCK);
        }

        expect(tokenizeStyles(`
          key: value;
          prop: 100;
          style: value3!important;
        `).length).toEqual(14);

        expect(() => tokenizeStyles(` key$: value; `)).toThrow();
        expect(() => tokenizeStyles(` key: value$; `)).toThrow();
        expect(() => tokenizeStyles(` key: value + 10; `)).toThrow();
        expect(() => tokenizeStyles(` key: &value; `)).toThrow();
      });
    });
  });
}