From 60e6f91a53ac8a81071e5f06eeba4828c3548b9f Mon Sep 17 00:00:00 2001
From: Victor Berchet
Date: Fri, 24 Jun 2016 14:31:35 -0700
Subject: [PATCH] refactor(HtmlLexer): cleanup

---
 modules/@angular/compiler-cli/src/codegen.ts | 4 +-
 .../@angular/compiler-cli/src/extract_i18n.ts | 4 +-
 modules/@angular/compiler/src/assertions.ts | 3 +-
 .../compiler/src/directive_normalizer.ts | 1 +
 modules/@angular/compiler/src/html_lexer.ts | 128 ++---
 modules/@angular/compiler/src/html_parser.ts | 6 +-
 .../compiler/src/i18n/xmb_serializer.ts | 7 +-
 .../compiler/src/metadata_resolver.ts | 2 +-
 .../@angular/compiler/test/html_lexer_spec.ts | 471 +++++++++++-------
 .../compiler/test/html_parser_spec.ts | 10 +-
 .../compiler/test/i18n/expander_spec.ts | 6 +-
 .../test/i18n/i18n_html_parser_spec.ts | 5 +-
 .../test/i18n/message_extractor_spec.ts | 2 +-
 .../dom_element_schema_registry_spec.ts | 8 +-
 14 files changed, 374 insertions(+), 283 deletions(-)

diff --git a/modules/@angular/compiler-cli/src/codegen.ts b/modules/@angular/compiler-cli/src/codegen.ts
index 4f565e44ee..387cd9947f 100644
--- a/modules/@angular/compiler-cli/src/codegen.ts
+++ b/modules/@angular/compiler-cli/src/codegen.ts
@@ -126,8 +126,7 @@ export class CodeGenerator {
     const reflectorHost = new ReflectorHost(program, compilerHost, options, reflectorHostContext);
     const staticReflector = new StaticReflector(reflectorHost);
     StaticAndDynamicReflectionCapabilities.install(staticReflector);
-    const expressionParser = new Parser(new Lexer());
-    const htmlParser = new HtmlParser(expressionParser);
+    const htmlParser = new HtmlParser();
     const config = new compiler.CompilerConfig({
       genDebugInfo: options.debug === true,
       defaultEncapsulation: ViewEncapsulation.Emulated,
@@ -135,6 +134,7 @@
       useJit: false
     });
     const normalizer = new DirectiveNormalizer(xhr, urlResolver, htmlParser, config);
+    const expressionParser = new Parser(new Lexer());
     const tmplParser = new TemplateParser(
         expressionParser, new DomElementSchemaRegistry(), htmlParser, /*console*/ null, []);
diff --git a/modules/@angular/compiler-cli/src/extract_i18n.ts b/modules/@angular/compiler-cli/src/extract_i18n.ts
index 5f917f628e..759128cc7e 100644
--- a/modules/@angular/compiler-cli/src/extract_i18n.ts
+++ b/modules/@angular/compiler-cli/src/extract_i18n.ts
@@ -137,8 +137,7 @@ class Extractor {
     const reflectorHost = new ReflectorHost(program, compilerHost, options);
     const staticReflector = new StaticReflector(reflectorHost);
     StaticAndDynamicReflectionCapabilities.install(staticReflector);
-    const expressionParser = new Parser(new Lexer());
-    const htmlParser = new HtmlParser(expressionParser);
+    const htmlParser = new HtmlParser();
     const config = new compiler.CompilerConfig({
       genDebugInfo: true,
       defaultEncapsulation: ViewEncapsulation.Emulated,
@@ -146,6 +145,7 @@
       useJit: false
     });
     const normalizer = new DirectiveNormalizer(xhr, urlResolver, htmlParser, config);
+    const expressionParser = new Parser(new Lexer());
     const resolver = new CompileMetadataResolver(
         new compiler.DirectiveResolver(staticReflector), new compiler.PipeResolver(staticReflector),
         new compiler.ViewResolver(staticReflector), config, staticReflector);
diff --git a/modules/@angular/compiler/src/assertions.ts b/modules/@angular/compiler/src/assertions.ts
index f3b0010de5..16e6259dc1 100644
--- a/modules/@angular/compiler/src/assertions.ts
+++ b/modules/@angular/compiler/src/assertions.ts
@@ -28,7 +28,8 @@ const INTERPOLATION_BLACKLIST_REGEXPS = [
   /^\s*$/, // empty
   /[<>]/, // html tag
/^[{}]$/, // i18n expansion - /&(#|[a-z])/i, // character reference + /&(#|[a-z])/i, // character reference, + /^\/\//, // comment ]; export function assertInterpolationSymbols(identifier: string, value: any): void { diff --git a/modules/@angular/compiler/src/directive_normalizer.ts b/modules/@angular/compiler/src/directive_normalizer.ts index 2da28d1196..b0685e1993 100644 --- a/modules/@angular/compiler/src/directive_normalizer.ts +++ b/modules/@angular/compiler/src/directive_normalizer.ts @@ -14,6 +14,7 @@ import {CompileDirectiveMetadata, CompileStylesheetMetadata, CompileTemplateMeta import {CompilerConfig} from './config'; import {HtmlAstVisitor, HtmlAttrAst, HtmlCommentAst, HtmlElementAst, HtmlExpansionAst, HtmlExpansionCaseAst, HtmlTextAst, htmlVisitAll} from './html_ast'; import {HtmlParser} from './html_parser'; +import {InterpolationConfig} from './interpolation_config'; import {extractStyleUrls, isStyleUrlResolvable} from './style_url_resolver'; import {PreparsedElementType, preparseElement} from './template_preparser'; import {UrlResolver} from './url_resolver'; diff --git a/modules/@angular/compiler/src/html_lexer.ts b/modules/@angular/compiler/src/html_lexer.ts index 0c6ca027a0..cb9b256c95 100644 --- a/modules/@angular/compiler/src/html_lexer.ts +++ b/modules/@angular/compiler/src/html_lexer.ts @@ -7,8 +7,7 @@ */ import * as chars from './chars'; -import {Parser as ExpressionParser} from './expression_parser/parser'; -import {NumberWrapper, StringWrapper, isBlank, isPresent} from './facade/lang'; +import {isBlank, isPresent} from './facade/lang'; import {HtmlTagContentType, NAMED_ENTITIES, getHtmlTagDefinition} from './html_tags'; import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config'; import {ParseError, ParseLocation, ParseSourceFile, ParseSourceSpan} from './parse_util'; @@ -33,7 +32,6 @@ export enum HtmlTokenType { EXPANSION_CASE_EXP_START, EXPANSION_CASE_EXP_END, EXPANSION_FORM_END, - INTERPOLATION, EOF } @@ -53,11 +51,10 @@ export class HtmlTokenizeResult { } export function tokenizeHtml( - sourceContent: string, sourceUrl: string, parser: ExpressionParser, - tokenizeExpansionForms: boolean = false, + sourceContent: string, sourceUrl: string, tokenizeExpansionForms: boolean = false, interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): HtmlTokenizeResult { return new _HtmlTokenizer( - new ParseSourceFile(sourceContent, sourceUrl), tokenizeExpansionForms, parser, + new ParseSourceFile(sourceContent, sourceUrl), tokenizeExpansionForms, interpolationConfig) .tokenize(); } @@ -65,7 +62,7 @@ export function tokenizeHtml( const _CR_OR_CRLF_REGEXP = /\r\n?/g; function _unexpectedCharacterErrorMsg(charCode: number): string { - var char = charCode === chars.$EOF ? 'EOF' : StringWrapper.fromCharCode(charCode); + const char = charCode === chars.$EOF ? 
'EOF' : String.fromCharCode(charCode); return `Unexpected character "${char}"`; } @@ -98,12 +95,10 @@ class _HtmlTokenizer { /** * @param _file The html source * @param _tokenizeIcu Whether to tokenize ICU messages (considered as text nodes when false) - * @param _expressionParser Used to check syntax of interpolations * @param _interpolationConfig */ constructor( private _file: ParseSourceFile, private _tokenizeIcu: boolean, - private _expressionParser: ExpressionParser, private _interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) { this._input = _file.content; this._length = _file.content.length; @@ -115,12 +110,12 @@ class _HtmlTokenizer { // In order to keep the original position in the source, we can not // pre-process it. // Instead CRs are processed right before instantiating the tokens. - return StringWrapper.replaceAll(content, _CR_OR_CRLF_REGEXP, '\n'); + return content.replace(_CR_OR_CRLF_REGEXP, '\n'); } tokenize(): HtmlTokenizeResult { while (this._peek !== chars.$EOF) { - var start = this._getLocation(); + const start = this._getLocation(); try { if (this._attemptCharCode(chars.$LT)) { if (this._attemptCharCode(chars.$BANG)) { @@ -157,7 +152,7 @@ class _HtmlTokenizer { * @internal */ private _tokenizeExpansionForm(): boolean { - if (isExpansionFormStart(this._input, this._index, this._interpolationConfig.start)) { + if (isExpansionFormStart(this._input, this._index, this._interpolationConfig)) { this._consumeExpansionFormStart(); return true; } @@ -186,29 +181,19 @@ class _HtmlTokenizer { return new ParseLocation(this._file, this._index, this._line, this._column); } - private _getSpan(start?: ParseLocation, end?: ParseLocation): ParseSourceSpan { - if (isBlank(start)) { - start = this._getLocation(); - } - if (isBlank(end)) { - end = this._getLocation(); - } + private _getSpan( + start: ParseLocation = this._getLocation(), + end: ParseLocation = this._getLocation()): ParseSourceSpan { return new ParseSourceSpan(start, end); } - private _beginToken(type: HtmlTokenType, start: ParseLocation = null) { - if (isBlank(start)) { - start = this._getLocation(); - } + private _beginToken(type: HtmlTokenType, start: ParseLocation = this._getLocation()) { this._currentTokenStart = start; this._currentTokenType = type; } - private _endToken(parts: string[], end: ParseLocation = null): HtmlToken { - if (isBlank(end)) { - end = this._getLocation(); - } - var token = new HtmlToken( + private _endToken(parts: string[], end: ParseLocation = this._getLocation()): HtmlToken { + const token = new HtmlToken( this._currentTokenType, parts, new ParseSourceSpan(this._currentTokenStart, end)); this.tokens.push(token); this._currentTokenStart = null; @@ -217,7 +202,7 @@ class _HtmlTokenizer { } private _createError(msg: string, span: ParseSourceSpan): _ControlFlowError { - var error = new HtmlTokenError(msg, this._currentTokenType, span); + const error = new HtmlTokenError(msg, this._currentTokenType, span); this._currentTokenStart = null; this._currentTokenType = null; return new _ControlFlowError(error); @@ -234,11 +219,9 @@ class _HtmlTokenizer { this._column++; } this._index++; - this._peek = this._index >= this._length ? chars.$EOF : - StringWrapper.charCodeAt(this._input, this._index); - this._nextPeek = this._index + 1 >= this._length ? - chars.$EOF : - StringWrapper.charCodeAt(this._input, this._index + 1); + this._peek = this._index >= this._length ? chars.$EOF : this._input.charCodeAt(this._index); + this._nextPeek = + this._index + 1 >= this._length ? 
chars.$EOF : this._input.charCodeAt(this._index + 1); } private _attemptCharCode(charCode: number): boolean { @@ -258,7 +241,7 @@ class _HtmlTokenizer { } private _requireCharCode(charCode: number) { - var location = this._getLocation(); + const location = this._getLocation(); if (!this._attemptCharCode(charCode)) { throw this._createError( _unexpectedCharacterErrorMsg(this._peek), this._getSpan(location, location)); @@ -271,8 +254,8 @@ class _HtmlTokenizer { return false; } const initialPosition = this._savePosition(); - for (var i = 0; i < len; i++) { - if (!this._attemptCharCode(StringWrapper.charCodeAt(chars, i))) { + for (let i = 0; i < len; i++) { + if (!this._attemptCharCode(chars.charCodeAt(i))) { // If attempting to parse the string fails, we want to reset the parser // to where it was before the attempt this._restorePosition(initialPosition); @@ -283,8 +266,8 @@ class _HtmlTokenizer { } private _attemptStrCaseInsensitive(chars: string): boolean { - for (var i = 0; i < chars.length; i++) { - if (!this._attemptCharCodeCaseInsensitive(StringWrapper.charCodeAt(chars, i))) { + for (let i = 0; i < chars.length; i++) { + if (!this._attemptCharCodeCaseInsensitive(chars.charCodeAt(i))) { return false; } } @@ -292,7 +275,7 @@ class _HtmlTokenizer { } private _requireStr(chars: string) { - var location = this._getLocation(); + const location = this._getLocation(); if (!this._attemptStr(chars)) { throw this._createError(_unexpectedCharacterErrorMsg(this._peek), this._getSpan(location)); } @@ -305,7 +288,7 @@ class _HtmlTokenizer { } private _requireCharCodeUntilFn(predicate: (code: number) => boolean, len: number) { - var start = this._getLocation(); + const start = this._getLocation(); this._attemptCharCodeUntilFn(predicate); if (this._index - start.offset < len) { throw this._createError( @@ -323,14 +306,14 @@ class _HtmlTokenizer { if (decodeEntities && this._peek === chars.$AMPERSAND) { return this._decodeEntity(); } else { - var index = this._index; + const index = this._index; this._advance(); return this._input[index]; } } private _decodeEntity(): string { - var start = this._getLocation(); + const start = this._getLocation(); this._advance(); if (this._attemptCharCode(chars.$HASH)) { let isHex = this._attemptCharCode(chars.$x) || this._attemptCharCode(chars.$X); @@ -342,8 +325,8 @@ class _HtmlTokenizer { this._advance(); let strNum = this._input.substring(numberStart, this._index - 1); try { - let charCode = NumberWrapper.parseInt(strNum, isHex ? 16 : 10); - return StringWrapper.fromCharCode(charCode); + let charCode = parseInt(strNum, isHex ? 16 : 10); + return String.fromCharCode(charCode); } catch (e) { let entity = this._input.substring(start.offset + 1, this._index - 1); throw this._createError(_unknownEntityErrorMsg(entity), this._getSpan(start)); @@ -367,11 +350,11 @@ class _HtmlTokenizer { private _consumeRawText( decodeEntities: boolean, firstCharOfEnd: number, attemptEndRest: () => boolean): HtmlToken { - var tagCloseStart: ParseLocation; - var textStart = this._getLocation(); + let tagCloseStart: ParseLocation; + const textStart = this._getLocation(); this._beginToken( decodeEntities ? 
HtmlTokenType.ESCAPABLE_RAW_TEXT : HtmlTokenType.RAW_TEXT, textStart); - var parts: string[] = []; + const parts: string[] = []; while (true) { tagCloseStart = this._getLocation(); if (this._attemptCharCode(firstCharOfEnd) && attemptEndRest()) { @@ -392,7 +375,7 @@ class _HtmlTokenizer { this._beginToken(HtmlTokenType.COMMENT_START, start); this._requireCharCode(chars.$MINUS); this._endToken([]); - var textToken = this._consumeRawText(false, chars.$MINUS, () => this._attemptStr('->')); + const textToken = this._consumeRawText(false, chars.$MINUS, () => this._attemptStr('->')); this._beginToken(HtmlTokenType.COMMENT_END, textToken.sourceSpan.end); this._endToken([]); } @@ -401,7 +384,7 @@ class _HtmlTokenizer { this._beginToken(HtmlTokenType.CDATA_START, start); this._requireStr('CDATA['); this._endToken([]); - var textToken = this._consumeRawText(false, chars.$RBRACKET, () => this._attemptStr(']>')); + const textToken = this._consumeRawText(false, chars.$RBRACKET, () => this._attemptStr(']>')); this._beginToken(HtmlTokenType.CDATA_END, textToken.sourceSpan.end); this._endToken([]); } @@ -414,12 +397,12 @@ class _HtmlTokenizer { } private _consumePrefixAndName(): string[] { - var nameOrPrefixStart = this._index; - var prefix: string = null; + const nameOrPrefixStart = this._index; + let prefix: string = null; while (this._peek !== chars.$COLON && !isPrefixEnd(this._peek)) { this._advance(); } - var nameStart: number; + let nameStart: number; if (this._peek === chars.$COLON) { this._advance(); prefix = this._input.substring(nameOrPrefixStart, this._index - 1); @@ -428,7 +411,7 @@ class _HtmlTokenizer { nameStart = nameOrPrefixStart; } this._requireCharCodeUntilFn(isNameEnd, this._index === nameStart ? 1 : 0); - var name = this._input.substring(nameStart, this._index); + const name = this._input.substring(nameStart, this._index); return [prefix, name]; } @@ -439,7 +422,7 @@ class _HtmlTokenizer { if (!chars.isAsciiLetter(this._peek)) { throw this._createError(_unexpectedCharacterErrorMsg(this._peek), this._getSpan()); } - var nameStart = this._index; + const nameStart = this._index; this._consumeTagOpenStart(start); lowercaseTagName = this._input.substring(nameStart, this._index).toLowerCase(); this._attemptCharCodeUntilFn(isNotWhitespace); @@ -466,7 +449,7 @@ class _HtmlTokenizer { throw e; } - var contentTokenType = getHtmlTagDefinition(lowercaseTagName).contentType; + const contentTokenType = getHtmlTagDefinition(lowercaseTagName).contentType; if (contentTokenType === HtmlTagContentType.RAW_TEXT) { this._consumeRawTextWithTagClose(lowercaseTagName, false); } else if (contentTokenType === HtmlTagContentType.ESCAPABLE_RAW_TEXT) { @@ -475,13 +458,12 @@ class _HtmlTokenizer { } private _consumeRawTextWithTagClose(lowercaseTagName: string, decodeEntities: boolean) { - var textToken = this._consumeRawText(decodeEntities, chars.$LT, () => { + const textToken = this._consumeRawText(decodeEntities, chars.$LT, () => { if (!this._attemptCharCode(chars.$SLASH)) return false; this._attemptCharCodeUntilFn(isNotWhitespace); if (!this._attemptStrCaseInsensitive(lowercaseTagName)) return false; this._attemptCharCodeUntilFn(isNotWhitespace); - if (!this._attemptCharCode(chars.$GT)) return false; - return true; + return this._attemptCharCode(chars.$GT); }); this._beginToken(HtmlTokenType.TAG_CLOSE, textToken.sourceSpan.end); this._endToken([null, lowercaseTagName]); @@ -489,13 +471,13 @@ class _HtmlTokenizer { private _consumeTagOpenStart(start: ParseLocation) { this._beginToken(HtmlTokenType.TAG_OPEN_START, 
start); - var parts = this._consumePrefixAndName(); + const parts = this._consumePrefixAndName(); this._endToken(parts); } private _consumeAttributeName() { this._beginToken(HtmlTokenType.ATTR_NAME); - var prefixAndName = this._consumePrefixAndName(); + const prefixAndName = this._consumePrefixAndName(); this._endToken(prefixAndName); } @@ -520,8 +502,8 @@ class _HtmlTokenizer { } private _consumeTagOpenEnd() { - var tokenType = this._attemptCharCode(chars.$SLASH) ? HtmlTokenType.TAG_OPEN_END_VOID : - HtmlTokenType.TAG_OPEN_END; + const tokenType = this._attemptCharCode(chars.$SLASH) ? HtmlTokenType.TAG_OPEN_END_VOID : + HtmlTokenType.TAG_OPEN_END; this._beginToken(tokenType); this._requireCharCode(chars.$GT); this._endToken([]); @@ -542,7 +524,7 @@ class _HtmlTokenizer { this._endToken([]); this._beginToken(HtmlTokenType.RAW_TEXT, this._getLocation()); - let condition = this._readUntil(chars.$COMMA); + const condition = this._readUntil(chars.$COMMA); this._endToken([condition], this._getLocation()); this._requireCharCode(chars.$COMMA); this._attemptCharCodeUntilFn(isNotWhitespace); @@ -558,7 +540,7 @@ class _HtmlTokenizer { private _consumeExpansionCaseStart() { this._beginToken(HtmlTokenType.EXPANSION_CASE_VALUE, this._getLocation()); - let value = this._readUntil(chars.$LBRACE).trim(); + const value = this._readUntil(chars.$LBRACE).trim(); this._endToken([value], this._getLocation()); this._attemptCharCodeUntilFn(isNotWhitespace); @@ -588,10 +570,9 @@ class _HtmlTokenizer { } private _consumeText() { - var start = this._getLocation(); + const start = this._getLocation(); this._beginToken(HtmlTokenType.TEXT, start); - - var parts: string[] = []; + const parts: string[] = []; do { if (this._attemptStr(this._interpolationConfig.start)) { @@ -613,13 +594,13 @@ class _HtmlTokenizer { return true; } - if (this._tokenizeIcu) { - if (isExpansionFormStart(this._input, this._index, this._interpolationConfig.start)) { + if (this._tokenizeIcu && !this._inInterpolation) { + if (isExpansionFormStart(this._input, this._index, this._interpolationConfig)) { // start of an expansion form return true; } - if (this._peek === chars.$RBRACE && !this._inInterpolation && this._isInExpansionCase()) { + if (this._peek === chars.$RBRACE && this._isInExpansionCase()) { // end of and expansion case return true; } @@ -685,9 +666,10 @@ function isNamedEntityEnd(code: number): boolean { return code == chars.$SEMICOLON || code == chars.$EOF || !chars.isAsciiLetter(code); } -function isExpansionFormStart(input: string, offset: number, interpolationStart: string): boolean { +function isExpansionFormStart( + input: string, offset: number, interpolationConfig: InterpolationConfig): boolean { return input.charCodeAt(offset) == chars.$LBRACE && - input.indexOf(interpolationStart, offset) != offset; + input.indexOf(interpolationConfig.start, offset) != offset; } function isExpansionCaseStart(peek: number): boolean { diff --git a/modules/@angular/compiler/src/html_parser.ts b/modules/@angular/compiler/src/html_parser.ts index 8746b7c82d..1b8d0a07c6 100644 --- a/modules/@angular/compiler/src/html_parser.ts +++ b/modules/@angular/compiler/src/html_parser.ts @@ -30,14 +30,12 @@ export class HtmlParseTreeResult { @Injectable() export class HtmlParser { - constructor(public _expressionParser: ExpressionParser) {} - parse( sourceContent: string, sourceUrl: string, parseExpansionForms: boolean = false, interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): HtmlParseTreeResult { - var tokensAndErrors = tokenizeHtml( 
- sourceContent, sourceUrl, this._expressionParser, parseExpansionForms, interpolationConfig); + var tokensAndErrors = + tokenizeHtml(sourceContent, sourceUrl, parseExpansionForms, interpolationConfig); var treeAndErrors = new TreeBuilder(tokensAndErrors.tokens).build(); return new HtmlParseTreeResult( treeAndErrors.rootNodes, diff --git a/modules/@angular/compiler/src/i18n/xmb_serializer.ts b/modules/@angular/compiler/src/i18n/xmb_serializer.ts index 9d96354b41..7ba919859b 100644 --- a/modules/@angular/compiler/src/i18n/xmb_serializer.ts +++ b/modules/@angular/compiler/src/i18n/xmb_serializer.ts @@ -6,9 +6,6 @@ * found in the LICENSE file at https://angular.io/license */ -import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer'; -import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser'; - import {RegExpWrapper, isBlank, isPresent} from '../facade/lang'; import {HtmlAst, HtmlElementAst} from '../html_ast'; import {HtmlParser} from '../html_parser'; @@ -37,9 +34,7 @@ export class XmbDeserializationError extends ParseError { } export function deserializeXmb(content: string, url: string): XmbDeserializationResult { - const expLexer = new ExpressionLexer(); - const expParser = new ExpressionParser(expLexer); - const parser = new HtmlParser(expParser); + const parser = new HtmlParser(); const normalizedContent = _expandPlaceholder(content.trim()); const parsed = parser.parse(normalizedContent, url); diff --git a/modules/@angular/compiler/src/metadata_resolver.ts b/modules/@angular/compiler/src/metadata_resolver.ts index 3c27af76ff..278f9c7074 100644 --- a/modules/@angular/compiler/src/metadata_resolver.ts +++ b/modules/@angular/compiler/src/metadata_resolver.ts @@ -6,7 +6,7 @@ * found in the LICENSE file at https://angular.io/license */ -import {AnimationAnimateMetadata, AnimationEntryMetadata, AnimationGroupMetadata, AnimationKeyframesSequenceMetadata, AnimationMetadata, AnimationStateDeclarationMetadata, AnimationStateMetadata, AnimationStateTransitionMetadata, AnimationStyleMetadata, AnimationWithStepsMetadata, AppModuleMetadata, AttributeMetadata, ComponentMetadata, HostMetadata, Inject, InjectMetadata, Injectable, Optional, OptionalMetadata, Provider, QueryMetadata, SelfMetadata, SkipSelfMetadata, ViewMetadata, ViewQueryMetadata, resolveForwardRef} from '@angular/core'; +import {AnimationAnimateMetadata, AnimationEntryMetadata, AnimationGroupMetadata, AnimationKeyframesSequenceMetadata, AnimationMetadata, AnimationStateDeclarationMetadata, AnimationStateMetadata, AnimationStateTransitionMetadata, AnimationStyleMetadata, AnimationWithStepsMetadata, AppModuleMetadata, AttributeMetadata, ChangeDetectionStrategy, ComponentMetadata, HostMetadata, Inject, InjectMetadata, Injectable, Optional, OptionalMetadata, Provider, QueryMetadata, SelfMetadata, SkipSelfMetadata, ViewMetadata, ViewQueryMetadata, resolveForwardRef} from '@angular/core'; import {LIFECYCLE_HOOKS_VALUES, ReflectorReader, createProvider, isProviderLiteral, reflector} from '../core_private'; import {StringMapWrapper} from '../src/facade/collection'; diff --git a/modules/@angular/compiler/test/html_lexer_spec.ts b/modules/@angular/compiler/test/html_lexer_spec.ts index fbb63cfde9..c6da634504 100644 --- a/modules/@angular/compiler/test/html_lexer_spec.ts +++ b/modules/@angular/compiler/test/html_lexer_spec.ts @@ -6,8 +6,6 @@ * found in the LICENSE file at https://angular.io/license */ -import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer'; 
-import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser'; import {HtmlToken, HtmlTokenError, HtmlTokenType, tokenizeHtml} from '@angular/compiler/src/html_lexer'; import {InterpolationConfig} from '@angular/compiler/src/interpolation_config'; import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '@angular/compiler/src/parse_util'; @@ -18,33 +16,41 @@ export function main() { describe('line/column numbers', () => { it('should work without newlines', () => { expect(tokenizeAndHumanizeLineColumn('a')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '0:2'], - [HtmlTokenType.TEXT, '0:3'], [HtmlTokenType.TAG_CLOSE, '0:4'], - [HtmlTokenType.EOF, '0:8'] + [HtmlTokenType.TAG_OPEN_START, '0:0'], + [HtmlTokenType.TAG_OPEN_END, '0:2'], + [HtmlTokenType.TEXT, '0:3'], + [HtmlTokenType.TAG_CLOSE, '0:4'], + [HtmlTokenType.EOF, '0:8'], ]); }); it('should work with one newline', () => { expect(tokenizeAndHumanizeLineColumn('\na')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '0:2'], - [HtmlTokenType.TEXT, '0:3'], [HtmlTokenType.TAG_CLOSE, '1:1'], - [HtmlTokenType.EOF, '1:5'] + [HtmlTokenType.TAG_OPEN_START, '0:0'], + [HtmlTokenType.TAG_OPEN_END, '0:2'], + [HtmlTokenType.TEXT, '0:3'], + [HtmlTokenType.TAG_CLOSE, '1:1'], + [HtmlTokenType.EOF, '1:5'], ]); }); it('should work with multiple newlines', () => { expect(tokenizeAndHumanizeLineColumn('\na')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '1:0'], - [HtmlTokenType.TEXT, '1:1'], [HtmlTokenType.TAG_CLOSE, '2:1'], - [HtmlTokenType.EOF, '2:5'] + [HtmlTokenType.TAG_OPEN_START, '0:0'], + [HtmlTokenType.TAG_OPEN_END, '1:0'], + [HtmlTokenType.TEXT, '1:1'], + [HtmlTokenType.TAG_CLOSE, '2:1'], + [HtmlTokenType.EOF, '2:5'], ]); }); it('should work with CR and LF', () => { expect(tokenizeAndHumanizeLineColumn('\r\na\r')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, '0:0'], [HtmlTokenType.TAG_OPEN_END, '1:0'], - [HtmlTokenType.TEXT, '1:1'], [HtmlTokenType.TAG_CLOSE, '2:1'], - [HtmlTokenType.EOF, '2:5'] + [HtmlTokenType.TAG_OPEN_START, '0:0'], + [HtmlTokenType.TAG_OPEN_END, '1:0'], + [HtmlTokenType.TEXT, '1:1'], + [HtmlTokenType.TAG_CLOSE, '2:1'], + [HtmlTokenType.EOF, '2:5'], ]); }); }); @@ -52,15 +58,19 @@ export function main() { describe('comments', () => { it('should parse comments', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.COMMENT_START], [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], - [HtmlTokenType.COMMENT_END], [HtmlTokenType.EOF] + [HtmlTokenType.COMMENT_START], + [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], + [HtmlTokenType.COMMENT_END], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => {expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.COMMENT_START, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.COMMENT_START, ''], + [HtmlTokenType.EOF, ''], ])}); it('should report { @@ -77,15 +87,19 @@ export function main() { it('should accept comments finishing by too many dashes (even number)', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.COMMENT_START, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.COMMENT_START, ''], + [HtmlTokenType.EOF, ''], ]); }); it('should accept comments finishing by too many dashes (odd number)', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.COMMENT_START, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.COMMENT_START, ''], + [HtmlTokenType.EOF, ''], ]); }); }); @@ -93,13 +107,15 @@ 
export function main() { describe('doctype', () => { it('should parse doctypes', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.DOC_TYPE, 'doctype html'], [HtmlTokenType.EOF] + [HtmlTokenType.DOC_TYPE, 'doctype html'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.DOC_TYPE, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.DOC_TYPE, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -113,15 +129,19 @@ export function main() { describe('CDATA', () => { it('should parse CDATA', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.CDATA_START], [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], - [HtmlTokenType.CDATA_END], [HtmlTokenType.EOF] + [HtmlTokenType.CDATA_START], + [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], + [HtmlTokenType.CDATA_END], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.CDATA_START, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.CDATA_START, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -141,36 +161,41 @@ export function main() { describe('open tags', () => { it('should parse open tags without prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'test'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse namespace prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, 'ns1', 'test'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, 'ns1', 'test'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse void tags', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END_VOID], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'test'], + [HtmlTokenType.TAG_OPEN_END_VOID], + [HtmlTokenType.EOF], ]); }); it('should allow whitespace after the tag name', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'test'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'test'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, ''], - [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_OPEN_START, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -179,88 +204,134 @@ export function main() { describe('attributes', () => { it('should parse attributes without prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], + ]); + }); + + it('should parse attributes with interpolation', () => { + expect(tokenizeAndHumanizeParts('')).toEqual([ + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, '{{v}}'], + [HtmlTokenType.ATTR_NAME, null, 'b'], + [HtmlTokenType.ATTR_VALUE, 's{{m}}e'], + [HtmlTokenType.ATTR_NAME, null, 'c'], + [HtmlTokenType.ATTR_VALUE, 's{{m//c}}e'], + 
[HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, 'ns1', 'a'], - [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, 'ns1', 'a'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes whose prefix is not valid', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, '(ns1:a)'], - [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, '(ns1:a)'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with single quote value', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'b'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with double quote value', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'b'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with unquoted value', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'b'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should allow whitespace', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'b'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'b'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with entities in values', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'AA'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'AA'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should not decode entities without trailing ";"', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, '&'], [HtmlTokenType.ATTR_NAME, null, 'b'], - [HtmlTokenType.ATTR_VALUE, 'c&&d'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 
'a'], + [HtmlTokenType.ATTR_VALUE, '&'], + [HtmlTokenType.ATTR_NAME, null, 'b'], + [HtmlTokenType.ATTR_VALUE, 'c&&d'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse attributes with "&" in values', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 'b && c &'], [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 'b && c &'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should parse values with CR and LF', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 't'], [HtmlTokenType.ATTR_NAME, null, 'a'], - [HtmlTokenType.ATTR_VALUE, 't\ne\ns\nt'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 't'], + [HtmlTokenType.ATTR_NAME, null, 'a'], + [HtmlTokenType.ATTR_VALUE, 't\ne\ns\nt'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.TAG_OPEN_START, ''], - [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_OPEN_START, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -269,25 +340,29 @@ export function main() { describe('closing tags', () => { it('should parse closing tags without prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_CLOSE, null, 'test'], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_CLOSE, null, 'test'], + [HtmlTokenType.EOF], ]); }); it('should parse closing tags with prefix', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_CLOSE, 'ns1', 'test'], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_CLOSE, 'ns1', 'test'], + [HtmlTokenType.EOF], ]); }); it('should allow whitespace', () => { expect(tokenizeAndHumanizeParts('')).toEqual([ - [HtmlTokenType.TAG_CLOSE, null, 'test'], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_CLOSE, null, 'test'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('')).toEqual([ - [HtmlTokenType.TAG_CLOSE, ''], [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_CLOSE, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -307,25 +382,29 @@ export function main() { describe('entities', () => { it('should parse named entities', () => { expect(tokenizeAndHumanizeParts('a&b')).toEqual([ - [HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'a&b'], + [HtmlTokenType.EOF], ]); }); it('should parse hexadecimal entities', () => { expect(tokenizeAndHumanizeParts('AA')).toEqual([ - [HtmlTokenType.TEXT, 'AA'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'AA'], + [HtmlTokenType.EOF], ]); }); it('should parse decimal entities', () => { expect(tokenizeAndHumanizeParts('A')).toEqual([ - [HtmlTokenType.TEXT, 'A'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'A'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('a&b')).toEqual([ - [HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF, ''] + [HtmlTokenType.TEXT, 'a&b'], + [HtmlTokenType.EOF, ''], ]); }); @@ -350,55 +429,57 @@ export function main() { describe('regular text', () => { it('should parse text', () => { expect(tokenizeAndHumanizeParts('a')).toEqual([ - [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'a'], + [HtmlTokenType.EOF], ]); }); it('should parse 
interpolation', () => { - expect(tokenizeAndHumanizeParts('{{ a }}')).toEqual([ - [HtmlTokenType.TEXT, '{{ a }}'], [HtmlTokenType.EOF] - ]); - }); - - it('should detect interpolation end', () => { - expect(tokenizeAndHumanizeParts('{{value|filter:{params: {strict: true}}}}')).toEqual([ - [HtmlTokenType.TEXT, '{{ a }}'], [HtmlTokenType.EOF] + expect(tokenizeAndHumanizeParts('{{ a }}b{{ c // comment }}')).toEqual([ + [HtmlTokenType.TEXT, '{{ a }}b{{ c // comment }}'], + [HtmlTokenType.EOF], ]); }); it('should parse interpolation with custom markers', () => { expect(tokenizeAndHumanizeParts('{% a %}', null, {start: '{%', end: '%}'})).toEqual([ - [HtmlTokenType.TEXT, '{% a %}'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, '{% a %}'], + [HtmlTokenType.EOF], ]); }); it('should handle CR & LF', () => { expect(tokenizeAndHumanizeParts('t\ne\rs\r\nt')).toEqual([ - [HtmlTokenType.TEXT, 't\ne\ns\nt'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 't\ne\ns\nt'], + [HtmlTokenType.EOF], ]); }); it('should parse entities', () => { expect(tokenizeAndHumanizeParts('a&b')).toEqual([ - [HtmlTokenType.TEXT, 'a&b'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'a&b'], + [HtmlTokenType.EOF], ]); }); it('should parse text starting with "&"', () => { expect(tokenizeAndHumanizeParts('a && b &')).toEqual([ - [HtmlTokenType.TEXT, 'a && b &'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'a && b &'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans('a')).toEqual([ - [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.EOF, ''] + [HtmlTokenType.TEXT, 'a'], + [HtmlTokenType.EOF, ''], ]); }); it('should allow "<" in text nodes', () => { expect(tokenizeAndHumanizeParts('{{ a < b ? c : d }}')).toEqual([ - [HtmlTokenType.TEXT, '{{ a < b ? c : d }}'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, '{{ a < b ? c : d }}'], + [HtmlTokenType.EOF], ]); expect(tokenizeAndHumanizeSourceSpans('

a')).toEqual([ @@ -410,103 +491,124 @@ export function main() { ]); expect(tokenizeAndHumanizeParts('< a>')).toEqual([ - [HtmlTokenType.TEXT, '< a>'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, '< a>'], + [HtmlTokenType.EOF], ]); }); - // TODO(vicb): make the lexer aware of Angular expressions - // see https://github.com/angular/angular/issues/5679 it('should parse valid start tag in interpolation', () => { expect(tokenizeAndHumanizeParts('{{ a d }}')).toEqual([ - [HtmlTokenType.TEXT, '{{ a '], [HtmlTokenType.TAG_OPEN_START, null, 'b'], - [HtmlTokenType.ATTR_NAME, null, '&&'], [HtmlTokenType.ATTR_NAME, null, 'c'], - [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.TEXT, ' d }}'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, '{{ a '], + [HtmlTokenType.TAG_OPEN_START, null, 'b'], + [HtmlTokenType.ATTR_NAME, null, '&&'], + [HtmlTokenType.ATTR_NAME, null, 'c'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.TEXT, ' d }}'], + [HtmlTokenType.EOF], ]); }); - }); describe('raw text', () => { it('should parse text', () => { expect(tokenizeAndHumanizeParts(``)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], [HtmlTokenType.TAG_CLOSE, null, 'script'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'script'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.RAW_TEXT, 't\ne\ns\nt'], + [HtmlTokenType.TAG_CLOSE, null, 'script'], + [HtmlTokenType.EOF], ]); }); it('should not detect entities', () => { expect(tokenizeAndHumanizeParts(``)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.RAW_TEXT, '&'], [HtmlTokenType.TAG_CLOSE, null, 'script'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'script'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.RAW_TEXT, '&'], + [HtmlTokenType.TAG_CLOSE, null, 'script'], + [HtmlTokenType.EOF], ]); }); it('should ignore other opening tags', () => { expect(tokenizeAndHumanizeParts(``)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.RAW_TEXT, 'a

'], [HtmlTokenType.TAG_CLOSE, null, 'script'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'script'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.RAW_TEXT, 'a
'], + [HtmlTokenType.TAG_CLOSE, null, 'script'], + [HtmlTokenType.EOF], ]); }); it('should ignore other closing tags', () => { expect(tokenizeAndHumanizeParts(``)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'script'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, null, 'script'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'script'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.RAW_TEXT, 'a'], + [HtmlTokenType.TAG_CLOSE, null, 'script'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans(``)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, ''], - [HtmlTokenType.RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, ''], - [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_OPEN_START, ''], + [HtmlTokenType.RAW_TEXT, 'a'], + [HtmlTokenType.TAG_CLOSE, ''], + [HtmlTokenType.EOF, ''], ]); }); - }); describe('escapable raw text', () => { it('should parse text', () => { expect(tokenizeAndHumanizeParts(`t\ne\rs\r\nt`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.TAG_OPEN_START, null, 'title'], + [HtmlTokenType.TAG_OPEN_END], [HtmlTokenType.ESCAPABLE_RAW_TEXT, 't\ne\ns\nt'], - [HtmlTokenType.TAG_CLOSE, null, 'title'], [HtmlTokenType.EOF] + [HtmlTokenType.TAG_CLOSE, null, 'title'], + [HtmlTokenType.EOF], ]); }); it('should detect entities', () => { expect(tokenizeAndHumanizeParts(`&`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.ESCAPABLE_RAW_TEXT, '&'], [HtmlTokenType.TAG_CLOSE, null, 'title'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'title'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.ESCAPABLE_RAW_TEXT, '&'], + [HtmlTokenType.TAG_CLOSE, null, 'title'], + [HtmlTokenType.EOF], ]); }); it('should ignore other opening tags', () => { expect(tokenizeAndHumanizeParts(`a<div>`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a
'], [HtmlTokenType.TAG_CLOSE, null, 'title'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'title'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a
'], + [HtmlTokenType.TAG_CLOSE, null, 'title'], + [HtmlTokenType.EOF], ]); }); it('should ignore other closing tags', () => { expect(tokenizeAndHumanizeParts(`a</test>`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, null, 'title'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, null, 'title'], - [HtmlTokenType.EOF] + [HtmlTokenType.TAG_OPEN_START, null, 'title'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], + [HtmlTokenType.TAG_CLOSE, null, 'title'], + [HtmlTokenType.EOF], ]); }); it('should store the locations', () => { expect(tokenizeAndHumanizeSourceSpans(`a`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, ''], - [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, ''], - [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_OPEN_START, ''], + [HtmlTokenType.ESCAPABLE_RAW_TEXT, 'a'], + [HtmlTokenType.TAG_CLOSE, ''], + [HtmlTokenType.EOF, ''], ]); }); @@ -516,65 +618,94 @@ export function main() { it('should parse an expansion form', () => { expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true)) .toEqual([ - [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'], - [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, '=5'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'five'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_CASE_VALUE, 'foo'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'bar'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END], - [HtmlTokenType.EOF] + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'one.two'], + [HtmlTokenType.RAW_TEXT, 'three'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'four'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=5'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'five'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_CASE_VALUE, 'foo'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'bar'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.EOF], ]); }); it('should parse an expansion form with text elements surrounding it', () => { expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([ - [HtmlTokenType.TEXT, 'before'], [HtmlTokenType.EXPANSION_FORM_START], - [HtmlTokenType.RAW_TEXT, 'one.two'], [HtmlTokenType.RAW_TEXT, 'three'], - [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], [HtmlTokenType.EXPANSION_CASE_EXP_START], - [HtmlTokenType.TEXT, 'four'], [HtmlTokenType.EXPANSION_CASE_EXP_END], - [HtmlTokenType.EXPANSION_FORM_END], [HtmlTokenType.TEXT, 'after'], [HtmlTokenType.EOF] + [HtmlTokenType.TEXT, 'before'], + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'one.two'], + [HtmlTokenType.RAW_TEXT, 'three'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'four'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.TEXT, 'after'], + [HtmlTokenType.EOF], ]); }); it('should parse an expansion forms with elements in it', () => { 
expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four a}}', true)).toEqual([ - [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'], - [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four '], - [HtmlTokenType.TAG_OPEN_START, null, 'b'], [HtmlTokenType.TAG_OPEN_END], - [HtmlTokenType.TEXT, 'a'], [HtmlTokenType.TAG_CLOSE, null, 'b'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END], - [HtmlTokenType.EOF] + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'one.two'], + [HtmlTokenType.RAW_TEXT, 'three'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'four '], + [HtmlTokenType.TAG_OPEN_START, null, 'b'], + [HtmlTokenType.TAG_OPEN_END], + [HtmlTokenType.TEXT, 'a'], + [HtmlTokenType.TAG_CLOSE, null, 'b'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.EOF], ]); }); - it('should parse an expansion forms with interpolation in it', () => { + it('should parse an expansion forms containing an interpolation', () => { expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([ - [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'], - [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'four {{a}}'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END], - [HtmlTokenType.EOF] + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'one.two'], + [HtmlTokenType.RAW_TEXT, 'three'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'four {{a}}'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.EOF], ]); }); it('should parse nested expansion forms', () => { expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true)) .toEqual([ - [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'one.two'], - [HtmlTokenType.RAW_TEXT, 'three'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'one.two'], + [HtmlTokenType.RAW_TEXT, 'three'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=4'], [HtmlTokenType.EXPANSION_CASE_EXP_START], - - [HtmlTokenType.EXPANSION_FORM_START], [HtmlTokenType.RAW_TEXT, 'xx'], - [HtmlTokenType.RAW_TEXT, 'yy'], [HtmlTokenType.EXPANSION_CASE_VALUE, '=x'], - [HtmlTokenType.EXPANSION_CASE_EXP_START], [HtmlTokenType.TEXT, 'one'], - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.EXPANSION_FORM_START], + [HtmlTokenType.RAW_TEXT, 'xx'], + [HtmlTokenType.RAW_TEXT, 'yy'], + [HtmlTokenType.EXPANSION_CASE_VALUE, '=x'], + [HtmlTokenType.EXPANSION_CASE_EXP_START], + [HtmlTokenType.TEXT, 'one'], + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], [HtmlTokenType.TEXT, ' '], - - [HtmlTokenType.EXPANSION_CASE_EXP_END], [HtmlTokenType.EXPANSION_FORM_END], - [HtmlTokenType.EOF] + [HtmlTokenType.EXPANSION_CASE_EXP_END], + [HtmlTokenType.EXPANSION_FORM_END], + [HtmlTokenType.EOF], ]); }); }); @@ -594,8 +725,11 @@ export function main() { describe('unicode characters', () => { it('should support unicode characters', () => { expect(tokenizeAndHumanizeSourceSpans(`

İ

`)).toEqual([ - [HtmlTokenType.TAG_OPEN_START, ''], - [HtmlTokenType.TEXT, 'İ'], [HtmlTokenType.TAG_CLOSE, '

'], [HtmlTokenType.EOF, ''] + [HtmlTokenType.TAG_OPEN_START, ''], + [HtmlTokenType.TEXT, 'İ'], + [HtmlTokenType.TAG_CLOSE, '

'], + [HtmlTokenType.EOF, ''], ]); }); }); @@ -606,8 +740,7 @@ export function main() { function tokenizeWithoutErrors( input: string, tokenizeExpansionForms: boolean = false, interpolationConfig?: InterpolationConfig): HtmlToken[] { - var tokenizeResult = tokenizeHtml( - input, 'someUrl', _getExpressionParser(), tokenizeExpansionForms, interpolationConfig); + var tokenizeResult = tokenizeHtml(input, 'someUrl', tokenizeExpansionForms, interpolationConfig); if (tokenizeResult.errors.length > 0) { const errorString = tokenizeResult.errors.join('\n'); @@ -638,10 +771,6 @@ function tokenizeAndHumanizeLineColumn(input: string): any[] { } function tokenizeAndHumanizeErrors(input: string): any[] { - return tokenizeHtml(input, 'someUrl', _getExpressionParser()) + return tokenizeHtml(input, 'someUrl') .errors.map(e => [e.tokenType, e.msg, humanizeLineColumn(e.span.start)]); } - -function _getExpressionParser(): ExpressionParser { - return new ExpressionParser(new ExpressionLexer()); -} diff --git a/modules/@angular/compiler/test/html_parser_spec.ts b/modules/@angular/compiler/test/html_parser_spec.ts index c24491cce2..ea0ed88b5b 100644 --- a/modules/@angular/compiler/test/html_parser_spec.ts +++ b/modules/@angular/compiler/test/html_parser_spec.ts @@ -6,8 +6,6 @@ * found in the LICENSE file at https://angular.io/license */ -import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer'; -import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser'; import {HtmlAttrAst, HtmlCommentAst, HtmlElementAst, HtmlExpansionAst, HtmlExpansionCaseAst, HtmlTextAst} from '@angular/compiler/src/html_ast'; import {HtmlTokenType} from '@angular/compiler/src/html_lexer'; import {HtmlParseTreeResult, HtmlParser, HtmlTreeError} from '@angular/compiler/src/html_parser'; @@ -19,14 +17,8 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './html_as export function main() { describe('HtmlParser', () => { var parser: HtmlParser; - var expLexer: ExpressionLexer; - var expParser: ExpressionParser; - beforeEach(() => { - expLexer = new ExpressionLexer(); - expParser = new ExpressionParser(expLexer); - parser = new HtmlParser(expParser); - }); + beforeEach(() => { parser = new HtmlParser(); }); describe('parse', () => { describe('text nodes', () => { diff --git a/modules/@angular/compiler/test/i18n/expander_spec.ts b/modules/@angular/compiler/test/i18n/expander_spec.ts index 8f5f6844c4..a5a3e9c6a8 100644 --- a/modules/@angular/compiler/test/i18n/expander_spec.ts +++ b/modules/@angular/compiler/test/i18n/expander_spec.ts @@ -6,8 +6,6 @@ * found in the LICENSE file at https://angular.io/license */ -import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer'; -import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser'; import {HtmlAttrAst, HtmlElementAst, HtmlTextAst} from '@angular/compiler/src/html_ast'; import {HtmlParser} from '@angular/compiler/src/html_parser'; import {ExpansionResult, expandNodes} from '@angular/compiler/src/i18n/expander'; @@ -18,9 +16,7 @@ import {ddescribe, describe, expect, iit, it} from '@angular/core/testing/testin export function main() { describe('Expander', () => { function expand(template: string): ExpansionResult { - const expLexer = new ExpressionLexer(); - const expParser = new ExpressionParser(expLexer); - const htmlParser = new HtmlParser(expParser); + const htmlParser = new HtmlParser(); const res = htmlParser.parse(template, 'url', true); return 
expandNodes(res.rootNodes); } diff --git a/modules/@angular/compiler/test/i18n/i18n_html_parser_spec.ts b/modules/@angular/compiler/test/i18n/i18n_html_parser_spec.ts index c18f779a7c..de8a199037 100644 --- a/modules/@angular/compiler/test/i18n/i18n_html_parser_spec.ts +++ b/modules/@angular/compiler/test/i18n/i18n_html_parser_spec.ts @@ -26,14 +26,15 @@ export function main() { template: string, messages: {[key: string]: string}, implicitTags: string[] = [], implicitAttrs: {[k: string]: string[]} = {}, interpolation?: InterpolationConfig): HtmlParseTreeResult { - var expParser = new ExpressionParser(new ExpressionLexer()); - let htmlParser = new HtmlParser(expParser); + let htmlParser = new HtmlParser(); let msgs = ''; StringMapWrapper.forEach( messages, (v: string, k: string) => msgs += `${v}`); let res = deserializeXmb(`${msgs}`, 'someUrl'); + const expParser = new ExpressionParser(new ExpressionLexer()); + return new I18nHtmlParser( htmlParser, expParser, res.content, res.messages, implicitTags, implicitAttrs) .parse(template, 'someurl', true, interpolation); diff --git a/modules/@angular/compiler/test/i18n/message_extractor_spec.ts b/modules/@angular/compiler/test/i18n/message_extractor_spec.ts index 9d4964e83a..3b45af10cd 100644 --- a/modules/@angular/compiler/test/i18n/message_extractor_spec.ts +++ b/modules/@angular/compiler/test/i18n/message_extractor_spec.ts @@ -20,7 +20,7 @@ export function main() { beforeEach(() => { const expParser = new ExpressionParser(new ExpressionLexer()); - const htmlParser = new HtmlParser(expParser); + const htmlParser = new HtmlParser(); // TODO: pass expression parser extractor = new MessageExtractor(htmlParser, expParser, ['i18n-tag'], {'i18n-el': ['trans']}); }); diff --git a/modules/@angular/compiler/test/schema/dom_element_schema_registry_spec.ts b/modules/@angular/compiler/test/schema/dom_element_schema_registry_spec.ts index 0622e47ddb..d49393b8eb 100644 --- a/modules/@angular/compiler/test/schema/dom_element_schema_registry_spec.ts +++ b/modules/@angular/compiler/test/schema/dom_element_schema_registry_spec.ts @@ -6,8 +6,6 @@ * found in the LICENSE file at https://angular.io/license */ -import {Lexer as ExpressionLexer} from '@angular/compiler/src/expression_parser/lexer'; -import {Parser as ExpressionParser} from '@angular/compiler/src/expression_parser/parser'; import {HtmlElementAst} from '@angular/compiler/src/html_ast'; import {HtmlParser} from '@angular/compiler/src/html_parser'; import {DomElementSchemaRegistry} from '@angular/compiler/src/schema/dom_element_schema_registry'; @@ -70,10 +68,8 @@ export function main() { }); it('should detect properties on namespaced elements', () => { - const expLexer = new ExpressionLexer(); - const expParser = new ExpressionParser(expLexer); - let htmlAst = new HtmlParser(expParser).parse('', 'TestComp'); - let nodeName = (htmlAst.rootNodes[0]).name; + const htmlAst = new HtmlParser().parse('', 'TestComp'); + const nodeName = (htmlAst.rootNodes[0]).name; expect(registry.hasProperty(nodeName, 'type')).toBeTruthy(); });
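Usage note: after this cleanup neither tokenizeHtml() nor HtmlParser depends on the expression parser any more. A minimal TypeScript sketch of the updated call sites, based only on the signatures shown in the diff above (the template string, file name, and logging are illustrative, not part of the commit):

  import {tokenizeHtml} from './html_lexer';
  import {HtmlParser} from './html_parser';
  import {DEFAULT_INTERPOLATION_CONFIG} from './interpolation_config';

  // HtmlParser no longer receives an ExpressionParser in its constructor.
  const parser = new HtmlParser();
  const tree = parser.parse('<b>{{ msg }}</b>', 'demo.html', /* parseExpansionForms */ true);

  // tokenizeHtml() likewise dropped the ExpressionParser argument.
  const lexed = tokenizeHtml(
      '<b>{{ msg }}</b>', 'demo.html', /* tokenizeExpansionForms */ true,
      DEFAULT_INTERPOLATION_CONFIG);

  console.log(tree.rootNodes.length, tree.errors.length, lexed.tokens.length);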