refactor(compiler): use options argument for parsers (#28055)

This commit consolidates the options that can modify the
parsing of text (e.g. HTML, Angular templates, CSS, i18n)
into an AST for further processing into a single `options`
hash.

This makes the code cleaner and more readable, but also
enables us to support further parsing options without
triggering wide-ranging changes to code that should not
be affected by these new options. Specifically, it will let
us pass information about the placement of a template
that is being parsed in its containing file, which is essential
for accurate SourceMap processing.

PR Close #28055
This commit is contained in:
Pete Bacon Darwin
2019-02-08 22:10:19 +00:00
committed by Misko Hevery
parent 81df5dcfc0
commit 673ac2945c
25 changed files with 200 additions and 169 deletions

View File

@@ -7,7 +7,7 @@
*/
import {getHtmlTagDefinition} from './html_tags';
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser';
export {ParseTreeResult, TreeError} from './parser';
@@ -15,9 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class HtmlParser extends Parser {
constructor() { super(getHtmlTagDefinition); }
parse(
source: string, url: string, parseExpansionForms: boolean = false,
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
return super.parse(source, url, parseExpansionForms, interpolationConfig);
parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
return super.parse(source, url, options);
}
}

View File

@@ -49,14 +49,20 @@ export class TokenizeResult {
constructor(public tokens: Token[], public errors: TokenError[]) {}
}
/**
* Options that modify how the text is tokenized.
*/
export interface TokenizeOptions {
/** Whether to tokenize ICU messages (considered as text nodes when false). */
tokenizeExpansionForms?: boolean;
/** How to tokenize interpolation markers. */
interpolationConfig?: InterpolationConfig;
}
export function tokenize(
source: string, url: string, getTagDefinition: (tagName: string) => TagDefinition,
tokenizeExpansionForms: boolean = false,
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): TokenizeResult {
return new _Tokenizer(
new ParseSourceFile(source, url), getTagDefinition, tokenizeExpansionForms,
interpolationConfig)
.tokenize();
options: TokenizeOptions = {}): TokenizeResult {
return new _Tokenizer(new ParseSourceFile(source, url), getTagDefinition, options).tokenize();
}
const _CR_OR_CRLF_REGEXP = /\r\n?/g;
@@ -78,6 +84,8 @@ class _ControlFlowError {
class _Tokenizer {
private _input: string;
private _length: number;
private _tokenizeIcu: boolean;
private _interpolationConfig: InterpolationConfig;
// Note: this is always lowercase!
private _peek: number = -1;
private _nextPeek: number = -1;
@@ -102,8 +110,9 @@
*/
constructor(
private _file: ParseSourceFile, private _getTagDefinition: (tagName: string) => TagDefinition,
private _tokenizeIcu: boolean,
private _interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG) {
options: TokenizeOptions) {
this._tokenizeIcu = options.tokenizeExpansionForms || false;
this._interpolationConfig = options.interpolationConfig || DEFAULT_INTERPOLATION_CONFIG;
this._input = _file.content;
this._length = _file.content.length;
this._advance();

View File

@@ -9,7 +9,6 @@
import {ParseError, ParseSourceSpan} from '../parse_util';
import * as html from './ast';
import {DEFAULT_INTERPOLATION_CONFIG, InterpolationConfig} from './interpolation_config';
import * as lex from './lexer';
import {TagDefinition, getNsPrefix, isNgContainer, mergeNsAndName} from './tags';
@@ -30,11 +29,8 @@ export class ParseTreeResult {
export class Parser {
constructor(public getTagDefinition: (tagName: string) => TagDefinition) {}
parse(
source: string, url: string, parseExpansionForms: boolean = false,
interpolationConfig: InterpolationConfig = DEFAULT_INTERPOLATION_CONFIG): ParseTreeResult {
const tokensAndErrors =
lex.tokenize(source, url, this.getTagDefinition, parseExpansionForms, interpolationConfig);
parse(source: string, url: string, options?: lex.TokenizeOptions): ParseTreeResult {
const tokensAndErrors = lex.tokenize(source, url, this.getTagDefinition, options);
const treeAndErrors = new _TreeBuilder(tokensAndErrors.tokens, this.getTagDefinition).build();

View File

@@ -6,6 +6,7 @@
* found in the LICENSE file at https://angular.io/license
*/
import {TokenizeOptions} from './lexer';
import {ParseTreeResult, Parser} from './parser';
import {getXmlTagDefinition} from './xml_tags';
@@ -14,7 +15,7 @@ export {ParseTreeResult, TreeError} from './parser';
export class XmlParser extends Parser {
constructor() { super(getXmlTagDefinition); }
parse(source: string, url: string, parseExpansionForms: boolean = false): ParseTreeResult {
return super.parse(source, url, parseExpansionForms);
parse(source: string, url: string, options?: TokenizeOptions): ParseTreeResult {
return super.parse(source, url, options);
}
}