This commit consolidates the options that can modify how text (e.g. HTML, Angular templates, CSS, i18n) is parsed into an AST for further processing, gathering them into a single `options` hash. This makes the code cleaner and more readable, and it also lets us support further parsing options without triggering wide-ranging changes to code that these new options should not affect. Specifically, it will let us pass information about the placement of a template within its containing file, which is essential for accurate SourceMap processing.

PR Close #28055
PR Close #28736
committed by Andrew Kushnir
parent dfb331cd18
commit 1a0b2ff4fb
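
For illustration, a minimal sketch of the option-hash pattern this commit adopts. The option names (`tokenizeExpansionForms`, `interpolationConfig`) come from the diff below; the simplified `tokenize` signature and body here are assumptions for demonstration, not the compiler's actual implementation:

    // Sketch of a consolidated options hash; the real lex.TokenizeOptions in the
    // compiler may carry additional fields.
    interface InterpolationConfig { start: string; end: string; }

    interface TokenizeOptions {
      tokenizeExpansionForms?: boolean;           // tokenize ICU expansion forms, e.g. {n, plural, ...}
      interpolationConfig?: InterpolationConfig;  // custom markers, e.g. {start: '{%', end: '%}'}
    }

    // Before, each parse-affecting flag was its own positional parameter, so adding
    // an option meant changing every call site. With the hash, new options can be
    // added without touching unrelated callers.
    function tokenize(input: string, url: string, options: TokenizeOptions = {}): void {
      const expandIcu = options.tokenizeExpansionForms || false;
      const interp = options.interpolationConfig || {start: '{{', end: '}}'};
      // ...tokenization would happen here (placeholder only)...
      console.log(`tokenizing ${url}: icu=${expandIcu}, markers=${interp.start}...${interp.end}`);
    }

    tokenize('{n, plural, =0 {none}}', 'template.html', {tokenizeExpansionForms: true});
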
@@ -501,7 +501,7 @@ import {serializeNodes as serializeHtmlNodes} from '../ml_parser/util/util';
 function parseHtml(html: string): html.Node[] {
   const htmlParser = new HtmlParser();
-  const parseResult = htmlParser.parse(html, 'extractor spec', true);
+  const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
   if (parseResult.errors.length > 1) {
     throw new Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
   }

@@ -330,7 +330,7 @@ export function _extractMessages(
     html: string, implicitTags: string[] = [],
     implicitAttrs: {[k: string]: string[]} = {}): Message[] {
   const htmlParser = new HtmlParser();
-  const parseResult = htmlParser.parse(html, 'extractor spec', true);
+  const parseResult = htmlParser.parse(html, 'extractor spec', {tokenizeExpansionForms: true});
   if (parseResult.errors.length > 1) {
     throw Error(`unexpected parse errors: ${parseResult.errors.join('\n')}`);
   }
@@ -35,13 +35,13 @@ import {serializeNodes} from './util/util';

     it('should support expansion', () => {
       const html = '{number, plural, =0 {none} =1 {one} other {many}}';
-      const ast = parser.parse(html, 'url', true);
+      const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
       expect(serializeNodes(ast.rootNodes)).toEqual([html]);
     });

     it('should support comment', () => {
       const html = '<!--comment-->';
-      const ast = parser.parse(html, 'url', true);
+      const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
       expect(serializeNodes(ast.rootNodes)).toEqual([html]);
     });

@@ -51,9 +51,9 @@ import {serializeNodes} from './util/util';
             <!--comment-->
             <p expansion="true">
               {number, plural, =0 {{sex, select, other {<b>?</b>}}}}
             </p>
           </div>`;
-      const ast = parser.parse(html, 'url', true);
+      const ast = parser.parse(html, 'url', {tokenizeExpansionForms: true});
       expect(serializeNodes(ast.rootNodes)).toEqual([html]);
     });
   });
@@ -300,7 +300,7 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spec_utils';
     it('should parse out expansion forms', () => {
       const parsed = parser.parse(
           `<div>before{messages.length, plural, =0 {You have <b>no</b> messages} =1 {One {{message}}}}after</div>`,
-          'TestComp', true);
+          'TestComp', {tokenizeExpansionForms: true});

       expect(humanizeDom(parsed)).toEqual([
         [html.Element, 'div', 0],
@@ -324,8 +324,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spec_utils';
     });

     it('should parse out expansion forms', () => {
-      const parsed =
-          parser.parse(`<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp', true);
+      const parsed = parser.parse(
+          `<div><span>{a, plural, =0 {b}}</span></div>`, 'TestComp',
+          {tokenizeExpansionForms: true});

       expect(humanizeDom(parsed)).toEqual([
         [html.Element, 'div', 0],
@@ -337,7 +338,8 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spec_utils';

     it('should parse out nested expansion forms', () => {
       const parsed = parser.parse(
-          `{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp', true);
+          `{messages.length, plural, =0 { {p.gender, select, male {m}} }}`, 'TestComp',
+          {tokenizeExpansionForms: true});
       expect(humanizeDom(parsed)).toEqual([
         [html.Expansion, 'messages.length', 'plural', 0],
         [html.ExpansionCase, '=0', 1],
@@ -353,26 +355,31 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spec_utils';
     });

     it('should error when expansion form is not closed', () => {
-      const p = parser.parse(`{messages.length, plural, =0 {one}`, 'TestComp', true);
+      const p = parser.parse(
+          `{messages.length, plural, =0 {one}`, 'TestComp', {tokenizeExpansionForms: true});
       expect(humanizeErrors(p.errors)).toEqual([
         [null, 'Invalid ICU message. Missing \'}\'.', '0:34']
       ]);
     });

     it('should support ICU expressions with cases that contain numbers', () => {
-      const p = parser.parse(`{sex, select, male {m} female {f} 0 {other}}`, 'TestComp', true);
+      const p = parser.parse(
+          `{sex, select, male {m} female {f} 0 {other}}`, 'TestComp',
+          {tokenizeExpansionForms: true});
       expect(p.errors.length).toEqual(0);
     });

     it('should error when expansion case is not closed', () => {
-      const p = parser.parse(`{messages.length, plural, =0 {one`, 'TestComp', true);
+      const p = parser.parse(
+          `{messages.length, plural, =0 {one`, 'TestComp', {tokenizeExpansionForms: true});
       expect(humanizeErrors(p.errors)).toEqual([
         [null, 'Invalid ICU message. Missing \'}\'.', '0:29']
       ]);
     });

     it('should error when invalid html in the case', () => {
-      const p = parser.parse(`{messages.length, plural, =0 {<b/>}`, 'TestComp', true);
+      const p = parser.parse(
+          `{messages.length, plural, =0 {<b/>}`, 'TestComp', {tokenizeExpansionForms: true});
       expect(humanizeErrors(p.errors)).toEqual([
         ['b', 'Only void and foreign elements can be self closed "b"', '0:30']
       ]);
@@ -404,8 +411,9 @@ import {humanizeDom, humanizeDomSourceSpans, humanizeLineColumn} from './ast_spec_utils';
     });

     it('should support expansion form', () => {
-      expect(humanizeDomSourceSpans(
-                 parser.parse('<div>{count, plural, =0 {msg}}</div>', 'TestComp', true)))
+      expect(humanizeDomSourceSpans(parser.parse(
+                 '<div>{count, plural, =0 {msg}}</div>', 'TestComp',
+                 {tokenizeExpansionForms: true})))
           .toEqual([
             [html.Element, 'div', 0, '<div>'],
             [html.Expansion, 'count', 'plural', 1, '{count, plural, =0 {msg}}'],
@@ -17,7 +17,7 @@ import {humanizeNodes} from './ast_spec_utils';
 describe('Expander', () => {
   function expand(template: string): ExpansionResult {
     const htmlParser = new HtmlParser();
-    const res = htmlParser.parse(template, 'url', true);
+    const res = htmlParser.parse(template, 'url', {tokenizeExpansionForms: true});
     return expandNodes(res.rootNodes);
   }

@@ -443,10 +443,11 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';
     });

     it('should parse interpolation with custom markers', () => {
-      expect(tokenizeAndHumanizeParts('{% a %}', null !, {start: '{%', end: '%}'})).toEqual([
-        [lex.TokenType.TEXT, '{% a %}'],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts('{% a %}', {interpolationConfig: {start: '{%', end: '%}'}}))
+          .toEqual([
+            [lex.TokenType.TEXT, '{% a %}'],
+            [lex.TokenType.EOF],
+          ]);
     });

     it('should handle CR & LF', () => {
@@ -524,13 +525,15 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';
     });

     it('should treat expansion form as text when they are not parsed', () => {
-      expect(tokenizeAndHumanizeParts('<span>{a, b, =4 {c}}</span>', false)).toEqual([
-        [lex.TokenType.TAG_OPEN_START, null, 'span'],
-        [lex.TokenType.TAG_OPEN_END],
-        [lex.TokenType.TEXT, '{a, b, =4 {c}}'],
-        [lex.TokenType.TAG_CLOSE, null, 'span'],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts(
+                 '<span>{a, b, =4 {c}}</span>', {tokenizeExpansionForms: false}))
+          .toEqual([
+            [lex.TokenType.TAG_OPEN_START, null, 'span'],
+            [lex.TokenType.TAG_OPEN_END],
+            [lex.TokenType.TEXT, '{a, b, =4 {c}}'],
+            [lex.TokenType.TAG_CLOSE, null, 'span'],
+            [lex.TokenType.EOF],
+          ]);
     });
   });

@@ -641,7 +644,9 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';

   describe('expansion forms', () => {
     it('should parse an expansion form', () => {
-      expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four} =5 {five} foo {bar} }', true))
+      expect(
+          tokenizeAndHumanizeParts(
+              '{one.two, three, =4 {four} =5 {five} foo {bar} }', {tokenizeExpansionForms: true}))
           .toEqual([
             [lex.TokenType.EXPANSION_FORM_START],
             [lex.TokenType.RAW_TEXT, 'one.two'],
@@ -664,75 +669,84 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';
     });

     it('should parse an expansion form with text elements surrounding it', () => {
-      expect(tokenizeAndHumanizeParts('before{one.two, three, =4 {four}}after', true)).toEqual([
-        [lex.TokenType.TEXT, 'before'],
-        [lex.TokenType.EXPANSION_FORM_START],
-        [lex.TokenType.RAW_TEXT, 'one.two'],
-        [lex.TokenType.RAW_TEXT, 'three'],
-        [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
-        [lex.TokenType.EXPANSION_CASE_EXP_START],
-        [lex.TokenType.TEXT, 'four'],
-        [lex.TokenType.EXPANSION_CASE_EXP_END],
-        [lex.TokenType.EXPANSION_FORM_END],
-        [lex.TokenType.TEXT, 'after'],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts(
+                 'before{one.two, three, =4 {four}}after', {tokenizeExpansionForms: true}))
+          .toEqual([
+            [lex.TokenType.TEXT, 'before'],
+            [lex.TokenType.EXPANSION_FORM_START],
+            [lex.TokenType.RAW_TEXT, 'one.two'],
+            [lex.TokenType.RAW_TEXT, 'three'],
+            [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+            [lex.TokenType.EXPANSION_CASE_EXP_START],
+            [lex.TokenType.TEXT, 'four'],
+            [lex.TokenType.EXPANSION_CASE_EXP_END],
+            [lex.TokenType.EXPANSION_FORM_END],
+            [lex.TokenType.TEXT, 'after'],
+            [lex.TokenType.EOF],
+          ]);
     });

     it('should parse an expansion form as a tag single child', () => {
-      expect(tokenizeAndHumanizeParts('<div><span>{a, b, =4 {c}}</span></div>', true)).toEqual([
-        [lex.TokenType.TAG_OPEN_START, null, 'div'],
-        [lex.TokenType.TAG_OPEN_END],
-        [lex.TokenType.TAG_OPEN_START, null, 'span'],
-        [lex.TokenType.TAG_OPEN_END],
-        [lex.TokenType.EXPANSION_FORM_START],
-        [lex.TokenType.RAW_TEXT, 'a'],
-        [lex.TokenType.RAW_TEXT, 'b'],
-        [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
-        [lex.TokenType.EXPANSION_CASE_EXP_START],
-        [lex.TokenType.TEXT, 'c'],
-        [lex.TokenType.EXPANSION_CASE_EXP_END],
-        [lex.TokenType.EXPANSION_FORM_END],
-        [lex.TokenType.TAG_CLOSE, null, 'span'],
-        [lex.TokenType.TAG_CLOSE, null, 'div'],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts(
+                 '<div><span>{a, b, =4 {c}}</span></div>', {tokenizeExpansionForms: true}))
+          .toEqual([
+            [lex.TokenType.TAG_OPEN_START, null, 'div'],
+            [lex.TokenType.TAG_OPEN_END],
+            [lex.TokenType.TAG_OPEN_START, null, 'span'],
+            [lex.TokenType.TAG_OPEN_END],
+            [lex.TokenType.EXPANSION_FORM_START],
+            [lex.TokenType.RAW_TEXT, 'a'],
+            [lex.TokenType.RAW_TEXT, 'b'],
+            [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+            [lex.TokenType.EXPANSION_CASE_EXP_START],
+            [lex.TokenType.TEXT, 'c'],
+            [lex.TokenType.EXPANSION_CASE_EXP_END],
+            [lex.TokenType.EXPANSION_FORM_END],
+            [lex.TokenType.TAG_CLOSE, null, 'span'],
+            [lex.TokenType.TAG_CLOSE, null, 'div'],
+            [lex.TokenType.EOF],
+          ]);
     });

     it('should parse an expansion forms with elements in it', () => {
-      expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four <b>a</b>}}', true)).toEqual([
-        [lex.TokenType.EXPANSION_FORM_START],
-        [lex.TokenType.RAW_TEXT, 'one.two'],
-        [lex.TokenType.RAW_TEXT, 'three'],
-        [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
-        [lex.TokenType.EXPANSION_CASE_EXP_START],
-        [lex.TokenType.TEXT, 'four '],
-        [lex.TokenType.TAG_OPEN_START, null, 'b'],
-        [lex.TokenType.TAG_OPEN_END],
-        [lex.TokenType.TEXT, 'a'],
-        [lex.TokenType.TAG_CLOSE, null, 'b'],
-        [lex.TokenType.EXPANSION_CASE_EXP_END],
-        [lex.TokenType.EXPANSION_FORM_END],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts(
+                 '{one.two, three, =4 {four <b>a</b>}}', {tokenizeExpansionForms: true}))
+          .toEqual([
+            [lex.TokenType.EXPANSION_FORM_START],
+            [lex.TokenType.RAW_TEXT, 'one.two'],
+            [lex.TokenType.RAW_TEXT, 'three'],
+            [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+            [lex.TokenType.EXPANSION_CASE_EXP_START],
+            [lex.TokenType.TEXT, 'four '],
+            [lex.TokenType.TAG_OPEN_START, null, 'b'],
+            [lex.TokenType.TAG_OPEN_END],
+            [lex.TokenType.TEXT, 'a'],
+            [lex.TokenType.TAG_CLOSE, null, 'b'],
+            [lex.TokenType.EXPANSION_CASE_EXP_END],
+            [lex.TokenType.EXPANSION_FORM_END],
+            [lex.TokenType.EOF],
+          ]);
     });

     it('should parse an expansion forms containing an interpolation', () => {
-      expect(tokenizeAndHumanizeParts('{one.two, three, =4 {four {{a}}}}', true)).toEqual([
-        [lex.TokenType.EXPANSION_FORM_START],
-        [lex.TokenType.RAW_TEXT, 'one.two'],
-        [lex.TokenType.RAW_TEXT, 'three'],
-        [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
-        [lex.TokenType.EXPANSION_CASE_EXP_START],
-        [lex.TokenType.TEXT, 'four {{a}}'],
-        [lex.TokenType.EXPANSION_CASE_EXP_END],
-        [lex.TokenType.EXPANSION_FORM_END],
-        [lex.TokenType.EOF],
-      ]);
+      expect(tokenizeAndHumanizeParts(
+                 '{one.two, three, =4 {four {{a}}}}', {tokenizeExpansionForms: true}))
+          .toEqual([
+            [lex.TokenType.EXPANSION_FORM_START],
+            [lex.TokenType.RAW_TEXT, 'one.two'],
+            [lex.TokenType.RAW_TEXT, 'three'],
+            [lex.TokenType.EXPANSION_CASE_VALUE, '=4'],
+            [lex.TokenType.EXPANSION_CASE_EXP_START],
+            [lex.TokenType.TEXT, 'four {{a}}'],
+            [lex.TokenType.EXPANSION_CASE_EXP_END],
+            [lex.TokenType.EXPANSION_FORM_END],
+            [lex.TokenType.EOF],
+          ]);
     });

     it('should parse nested expansion forms', () => {
-      expect(tokenizeAndHumanizeParts(`{one.two, three, =4 { {xx, yy, =x {one}} }}`, true))
+      expect(tokenizeAndHumanizeParts(
+                 `{one.two, three, =4 { {xx, yy, =x {one}} }}`, {tokenizeExpansionForms: true}))
           .toEqual([
             [lex.TokenType.EXPANSION_FORM_START],
             [lex.TokenType.RAW_TEXT, 'one.two'],
@@ -757,11 +771,12 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';

   describe('errors', () => {
     it('should report unescaped "{" on error', () => {
-      expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, true)).toEqual([[
-        lex.TokenType.RAW_TEXT,
-        `Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`,
-        '0:21',
-      ]]);
+      expect(tokenizeAndHumanizeErrors(`<p>before { after</p>`, {tokenizeExpansionForms: true}))
+          .toEqual([[
+            lex.TokenType.RAW_TEXT,
+            `Unexpected character "EOF" (Do you have an unescaped "{" in your template? Use "{{ '{' }}") to escape it.)`,
+            '0:21',
+          ]]);
     });

     it('should include 2 lines of context in message', () => {
@@ -790,11 +805,8 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_util';
   });
 }

-function tokenizeWithoutErrors(
-    input: string, tokenizeExpansionForms: boolean = false,
-    interpolationConfig?: InterpolationConfig): lex.Token[] {
-  const tokenizeResult = lex.tokenize(
-      input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms, interpolationConfig);
+function tokenizeWithoutErrors(input: string, options?: lex.TokenizeOptions): lex.Token[] {
+  const tokenizeResult = lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options);

 if (tokenizeResult.errors.length > 0) {
   const errorString = tokenizeResult.errors.join('\n');
@@ -804,27 +816,25 @@ function tokenizeWithoutErrors(
   return tokenizeResult.tokens;
 }

-function tokenizeAndHumanizeParts(
-    input: string, tokenizeExpansionForms: boolean = false,
-    interpolationConfig?: InterpolationConfig): any[] {
-  return tokenizeWithoutErrors(input, tokenizeExpansionForms, interpolationConfig)
-      .map(token => [<any>token.type].concat(token.parts));
+function tokenizeAndHumanizeParts(input: string, options?: lex.TokenizeOptions): any[] {
+  return tokenizeWithoutErrors(input, options).map(token => [<any>token.type].concat(token.parts));
 }

-function tokenizeAndHumanizeSourceSpans(input: string): any[] {
-  return tokenizeWithoutErrors(input).map(token => [<any>token.type, token.sourceSpan.toString()]);
+function tokenizeAndHumanizeSourceSpans(input: string, options?: lex.TokenizeOptions): any[] {
+  return tokenizeWithoutErrors(input, options)
+      .map(token => [<any>token.type, token.sourceSpan.toString()]);
 }

 function humanizeLineColumn(location: ParseLocation): string {
   return `${location.line}:${location.col}`;
 }

-function tokenizeAndHumanizeLineColumn(input: string): any[] {
-  return tokenizeWithoutErrors(input).map(
-      token => [<any>token.type, humanizeLineColumn(token.sourceSpan.start)]);
+function tokenizeAndHumanizeLineColumn(input: string, options?: lex.TokenizeOptions): any[] {
+  return tokenizeWithoutErrors(input, options)
+      .map(token => [<any>token.type, humanizeLineColumn(token.sourceSpan.start)]);
 }

-function tokenizeAndHumanizeErrors(input: string, tokenizeExpansionForms: boolean = false): any[] {
-  return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, tokenizeExpansionForms)
+function tokenizeAndHumanizeErrors(input: string, options?: lex.TokenizeOptions): any[] {
+  return lex.tokenize(input, 'someUrl', getHtmlTagDefinition, options)
       .errors.map(e => [<any>e.tokenType, e.msg, humanizeLineColumn(e.span.start)]);
 }

@@ -81,7 +81,8 @@ export function parseR3(
     input: string, options: {preserveWhitespaces?: boolean} = {}): Render3ParseResult {
   const htmlParser = new HtmlParser();

-  const parseResult = htmlParser.parse(input, 'path:://to/template', true);
+  const parseResult =
+      htmlParser.parse(input, 'path:://to/template', {tokenizeExpansionForms: true});

   if (parseResult.errors.length > 0) {
     const msg = parseResult.errors.map(e => e.toString()).join('\n');