diff --git a/packages/compiler/src/ml_parser/lexer.ts b/packages/compiler/src/ml_parser/lexer.ts
index a2d734bf02..cff42e4763 100644
--- a/packages/compiler/src/ml_parser/lexer.ts
+++ b/packages/compiler/src/ml_parser/lexer.ts
@@ -101,6 +101,10 @@ export interface TokenizeOptions {
    * included in source-map segments. A common example is whitespace.
    */
   leadingTriviaChars?: string[];
+  /**
+   * If true, do not convert CRLF to LF.
+   */
+  preserveLineEndings?: boolean;
 }
 
 export function tokenize(
@@ -134,6 +138,7 @@ class _Tokenizer {
   private _currentTokenType: TokenType|null = null;
   private _expansionCaseStack: TokenType[] = [];
   private _inInterpolation: boolean = false;
+  private readonly _preserveLineEndings: boolean;
   tokens: Token[] = [];
   errors: TokenError[] = [];
 
@@ -153,6 +158,7 @@ class _Tokenizer {
         options.range || {endPos: _file.content.length, startPos: 0, startLine: 0, startCol: 0};
     this._cursor = options.escapedString ? new EscapedCharacterCursor(_file, range) :
                                            new PlainCharacterCursor(_file, range);
+    this._preserveLineEndings = options.preserveLineEndings || false;
     try {
       this._cursor.init();
     } catch (e) {
@@ -161,6 +167,9 @@ class _Tokenizer {
   }
 
   private _processCarriageReturns(content: string): string {
+    if (this._preserveLineEndings) {
+      return content;
+    }
     // http://www.w3.org/TR/html5/syntax.html#preprocessing-the-input-stream
     // In order to keep the original position in the source, we can not
     // pre-process it.
diff --git a/packages/language-service/src/typescript_host.ts b/packages/language-service/src/typescript_host.ts
index f0e5280744..25a3f2e318 100644
--- a/packages/language-service/src/typescript_host.ts
+++ b/packages/language-service/src/typescript_host.ts
@@ -510,6 +510,7 @@ export class TypeScriptServiceHost implements LanguageServiceHost {
     );
     const htmlResult = htmlParser.parse(template.source, fileName, {
       tokenizeExpansionForms: true,
+      preserveLineEndings: true,  // do not convert CRLF to LF
     });
     const {directives, pipes, schemas} = this.getModuleMetadataForDirective(classSymbol);
     const parseResult =
diff --git a/packages/language-service/test/diagnostics_spec.ts b/packages/language-service/test/diagnostics_spec.ts
index 4378c95468..1db4fa0b84 100644
--- a/packages/language-service/test/diagnostics_spec.ts
+++ b/packages/language-service/test/diagnostics_spec.ts
@@ -645,30 +645,27 @@ describe('diagnostics', () => {
     });
   });
 
-  // https://github.com/angular/vscode-ng-language-service/issues/235
-  // There is no easy fix for this issue currently due to the way template
-  // tokenization is done. In the example below, the whole string
-  // `\r\n{{line0}}\r\n{{line1}}\r\n{{line2}}` is tokenized as a whole, and then
-  // CR characters are stripped from it. Source span information is lost in the
-  // process. For more discussion, see the link above.
-  /*
   it('should work correctly with CRLF endings', () => {
+    // https://github.com/angular/vscode-ng-language-service/issues/235
+    // In the example below, the string
+    // `\r\n{{line0}}\r\n{{line1}}\r\n{{line2}}` is tokenized as a whole,
+    // and then CRLF characters are converted to LF.
+    // Source span information is lost in the process.
     const fileName = '/app/test.ng';
-    const content = mockHost.override(fileName,
-        '\r\n