refactor(ivy): update the compiler to emit $localize
tags (#31609)
This commit changes the Angular compiler (ivy-only) to generate `$localize` tagged strings for component templates that use `i18n` attributes. BREAKING CHANGE: Since `$localize` is a global function, it must be included in any applications that use i18n. This is achieved by importing the `@angular/localize` package into an appropriate bundle, where it will be executed before the renderer needs to call `$localize`. For CLI-based projects, this is best done in the `polyfills.ts` file. ```ts import '@angular/localize'; ``` For non-CLI applications, this could be added as a script to the index.html file or another suitable script file. PR Close #31609
This commit is contained in:

committed by
Misko Hevery

parent
b21397bde9
commit
fa79f51645
@ -15,12 +15,14 @@ import {NgtscProgram} from '../../src/ngtsc/program';
|
||||
const IDENTIFIER = /[A-Za-z_$ɵ][A-Za-z0-9_$]*/;
|
||||
const OPERATOR =
|
||||
/!|\?|%|\*|\/|\^|&&?|\|\|?|\(|\)|\{|\}|\[|\]|:|;|<=?|>=?|={1,3}|!==?|=>|\+\+?|--?|@|,|\.|\.\.\./;
|
||||
const STRING = /'(\\'|[^'])*'|"(\\"|[^"])*"|`(\\`[\s\S])*?`/;
|
||||
const STRING = /'(\\'|[^'])*'|"(\\"|[^"])*"/;
|
||||
const BACKTICK_STRING = /\\`(([\s\S]*?)(\$\{[^}]*?\})?)*?\\`/;
|
||||
const BACKTICK_INTERPOLATION = /(\$\{[^}]*\})/;
|
||||
const NUMBER = /\d+/;
|
||||
|
||||
const ELLIPSIS = '…';
|
||||
const TOKEN = new RegExp(
|
||||
`\\s*((${IDENTIFIER.source})|(${OPERATOR.source})|(${STRING.source})|${NUMBER.source}|${ELLIPSIS})\\s*`,
|
||||
`\\s*((${IDENTIFIER.source})|(${OPERATOR.source})|(${STRING.source})|(${BACKTICK_STRING.source})|${NUMBER.source}|${ELLIPSIS})\\s*`,
|
||||
'y');
|
||||
|
||||
type Piece = string | RegExp;
|
||||
@ -30,6 +32,8 @@ const SKIP = /(?:.|\n|\r)*/;
|
||||
const ERROR_CONTEXT_WIDTH = 30;
|
||||
// Transform the expected output to set of tokens
|
||||
function tokenize(text: string): Piece[] {
|
||||
// TOKEN.lastIndex is stateful so we cache the `lastIndex` and restore it at the end of the call.
|
||||
const lastIndex = TOKEN.lastIndex;
|
||||
TOKEN.lastIndex = 0;
|
||||
|
||||
let match: RegExpMatchArray|null;
|
||||
@ -42,6 +46,8 @@ function tokenize(text: string): Piece[] {
|
||||
pieces.push(IDENTIFIER);
|
||||
} else if (token === ELLIPSIS) {
|
||||
pieces.push(SKIP);
|
||||
} else if (match = BACKTICK_STRING.exec(token)) {
|
||||
pieces.push(...tokenizeBackTickString(token));
|
||||
} else {
|
||||
pieces.push(token);
|
||||
}
|
||||
@ -57,10 +63,33 @@ function tokenize(text: string): Piece[] {
|
||||
`Invalid test, no token found for "${text[tokenizedTextEnd]}" ` +
|
||||
`(context = '${text.substr(from, to)}...'`);
|
||||
}
|
||||
// Reset the lastIndex in case we are in a recursive `tokenize()` call.
|
||||
TOKEN.lastIndex = lastIndex;
|
||||
|
||||
return pieces;
|
||||
}
|
||||
|
||||
/**
|
||||
* Back-ticks are escaped as "\`" so we must strip the backslashes.
|
||||
* Also the string will likely contain interpolations and if an interpolation holds an
|
||||
* identifier we will need to match that later. So tokenize the interpolation too!
|
||||
*/
|
||||
function tokenizeBackTickString(str: string): Piece[] {
|
||||
const pieces: Piece[] = ['`'];
|
||||
const backTickPieces = str.slice(2, -2).split(BACKTICK_INTERPOLATION);
|
||||
backTickPieces.forEach((backTickPiece) => {
|
||||
if (BACKTICK_INTERPOLATION.test(backTickPiece)) {
|
||||
// An interpolation so tokenize this expression
|
||||
pieces.push(...tokenize(backTickPiece));
|
||||
} else {
|
||||
// Not an interpolation so just add it as a piece
|
||||
pieces.push(backTickPiece);
|
||||
}
|
||||
});
|
||||
pieces.push('`');
|
||||
return pieces;
|
||||
}
|
||||
|
||||
export function expectEmit(
|
||||
source: string, expected: string, description: string,
|
||||
assertIdentifiers?: {[name: string]: RegExp}) {
|
||||
|
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user