style(lint): re-format modules/@angular
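The diff below applies the repository's lint formatting to the lexer spec: named imports are sorted, string literals switch from double to single quotes, and signatures that overflow the line limit wrap their parameter lists onto an indented continuation line. For orientation only, here is a minimal TypeScript sketch of that before/after pattern; it is not part of the commit, and the function name and strings are hypothetical:

    // Before: double-quoted strings and an over-long signature on one line.
    // function describeToken(tokenText: string, index: number, kind: string): string {
    //   return "token " + tokenText + " at index " + index + " (" + kind + ")";
    // }

    // After: single quotes, and the parameter list wrapped onto an indented continuation line.
    function describeToken(
        tokenText: string, index: number, kind: string): string {
      return 'token ' + tokenText + ' at index ' + index + ' (' + kind + ')';
    }

    console.log(describeToken('j', 0, 'identifier'));  // token j at index 0 (identifier)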
@@ -1,6 +1,5 @@
-import {ddescribe, describe, it, expect} from '@angular/core/testing';
-
 import {Lexer, Token} from '@angular/compiler/src/expression_parser/lexer';
+import {ddescribe, describe, expect, it} from '@angular/core/testing';

 import {StringWrapper} from '../../src/facade/lang';

@@ -13,36 +12,42 @@ function expectToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */)
   expect(token.index).toEqual(index);
 }

-function expectCharacterToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, character: any /** TODO #9100 */) {
+function expectCharacterToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, character: any /** TODO #9100 */) {
   expect(character.length).toBe(1);
   expectToken(token, index);
   expect(token.isCharacter(StringWrapper.charCodeAt(character, 0))).toBe(true);
 }

-function expectOperatorToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, operator: any /** TODO #9100 */) {
+function expectOperatorToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, operator: any /** TODO #9100 */) {
   expectToken(token, index);
   expect(token.isOperator(operator)).toBe(true);
 }

-function expectNumberToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, n: any /** TODO #9100 */) {
+function expectNumberToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, n: any /** TODO #9100 */) {
   expectToken(token, index);
   expect(token.isNumber()).toBe(true);
   expect(token.toNumber()).toEqual(n);
 }

-function expectStringToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, str: any /** TODO #9100 */) {
+function expectStringToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, str: any /** TODO #9100 */) {
   expectToken(token, index);
   expect(token.isString()).toBe(true);
   expect(token.toString()).toEqual(str);
 }

-function expectIdentifierToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, identifier: any /** TODO #9100 */) {
+function expectIdentifierToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, identifier: any /** TODO #9100 */) {
   expectToken(token, index);
   expect(token.isIdentifier()).toBe(true);
   expect(token.toString()).toEqual(identifier);
 }

-function expectKeywordToken(token: any /** TODO #9100 */, index: any /** TODO #9100 */, keyword: any /** TODO #9100 */) {
+function expectKeywordToken(
+    token: any /** TODO #9100 */, index: any /** TODO #9100 */, keyword: any /** TODO #9100 */) {
   expectToken(token, index);
   expect(token.isKeyword()).toBe(true);
   expect(token.toString()).toEqual(keyword);

@@ -52,13 +57,13 @@ export function main() {
   describe('lexer', function() {
     describe('token', function() {
       it('should tokenize a simple identifier', function() {
-        var tokens: number[] = lex("j");
+        var tokens: number[] = lex('j');
         expect(tokens.length).toEqual(1);
         expectIdentifierToken(tokens[0], 0, 'j');
       });

       it('should tokenize a dotted identifier', function() {
-        var tokens: number[] = lex("j.k");
+        var tokens: number[] = lex('j.k');
         expect(tokens.length).toEqual(3);
         expectIdentifierToken(tokens[0], 0, 'j');
         expectCharacterToken(tokens[1], 1, '.');

@@ -66,35 +71,35 @@ export function main() {
       });

       it('should tokenize an operator', function() {
-        var tokens: number[] = lex("j-k");
+        var tokens: number[] = lex('j-k');
         expect(tokens.length).toEqual(3);
         expectOperatorToken(tokens[1], 1, '-');
       });

       it('should tokenize an indexed operator', function() {
-        var tokens: number[] = lex("j[k]");
+        var tokens: number[] = lex('j[k]');
         expect(tokens.length).toEqual(4);
-        expectCharacterToken(tokens[1], 1, "[");
-        expectCharacterToken(tokens[3], 3, "]");
+        expectCharacterToken(tokens[1], 1, '[');
+        expectCharacterToken(tokens[3], 3, ']');
       });

       it('should tokenize numbers', function() {
-        var tokens: number[] = lex("88");
+        var tokens: number[] = lex('88');
         expect(tokens.length).toEqual(1);
         expectNumberToken(tokens[0], 0, 88);
       });

       it('should tokenize numbers within index ops',
-         function() { expectNumberToken(lex("a[22]")[2], 2, 22); });
+         function() { expectNumberToken(lex('a[22]')[2], 2, 22); });

       it('should tokenize simple quoted strings',
-         function() { expectStringToken(lex('"a"')[0], 0, "a"); });
+         function() { expectStringToken(lex('"a"')[0], 0, 'a'); });

       it('should tokenize quoted strings with escaped quotes',
          function() { expectStringToken(lex('"a\\""')[0], 0, 'a"'); });

       it('should tokenize a string', function() {
-        var tokens: Token[] = lex("j-a.bc[22]+1.3|f:'a\\\'c':\"d\\\"e\"");
+        var tokens: Token[] = lex('j-a.bc[22]+1.3|f:\'a\\\'c\':"d\\"e"');
         expectIdentifierToken(tokens[0], 0, 'j');
         expectOperatorToken(tokens[1], 1, '-');
         expectIdentifierToken(tokens[2], 2, 'a');

@@ -108,27 +113,27 @@ export function main() {
         expectOperatorToken(tokens[10], 14, '|');
         expectIdentifierToken(tokens[11], 15, 'f');
         expectCharacterToken(tokens[12], 16, ':');
-        expectStringToken(tokens[13], 17, "a'c");
+        expectStringToken(tokens[13], 17, 'a\'c');
         expectCharacterToken(tokens[14], 23, ':');
         expectStringToken(tokens[15], 24, 'd"e');
       });

       it('should tokenize undefined', function() {
-        var tokens: Token[] = lex("undefined");
-        expectKeywordToken(tokens[0], 0, "undefined");
+        var tokens: Token[] = lex('undefined');
+        expectKeywordToken(tokens[0], 0, 'undefined');
         expect(tokens[0].isKeywordUndefined()).toBe(true);
       });

       it('should ignore whitespace', function() {
-        var tokens: Token[] = lex("a \t \n \r b");
+        var tokens: Token[] = lex('a \t \n \r b');
         expectIdentifierToken(tokens[0], 0, 'a');
         expectIdentifierToken(tokens[1], 8, 'b');
       });

       it('should tokenize quoted string', () => {
-        var str = "['\\'', \"\\\"\"]";
+        var str = '[\'\\\'\', "\\""]';
         var tokens: Token[] = lex(str);
-        expectStringToken(tokens[1], 1, "'");
+        expectStringToken(tokens[1], 1, '\'');
         expectStringToken(tokens[3], 7, '"');
       });

@@ -146,7 +151,7 @@ export function main() {
       });

       it('should tokenize relation', function() {
-        var tokens: Token[] = lex("! == != < > <= >= === !==");
+        var tokens: Token[] = lex('! == != < > <= >= === !==');
         expectOperatorToken(tokens[0], 0, '!');
         expectOperatorToken(tokens[1], 2, '==');
         expectOperatorToken(tokens[2], 5, '!=');

@@ -159,7 +164,7 @@ export function main() {
       });

       it('should tokenize statements', function() {
-        var tokens: Token[] = lex("a;b;");
+        var tokens: Token[] = lex('a;b;');
         expectIdentifierToken(tokens[0], 0, 'a');
         expectCharacterToken(tokens[1], 1, ';');
         expectIdentifierToken(tokens[2], 2, 'b');

@@ -167,19 +172,19 @@ export function main() {
       });

       it('should tokenize function invocation', function() {
-        var tokens: Token[] = lex("a()");
+        var tokens: Token[] = lex('a()');
         expectIdentifierToken(tokens[0], 0, 'a');
         expectCharacterToken(tokens[1], 1, '(');
         expectCharacterToken(tokens[2], 2, ')');
       });

       it('should tokenize simple method invocations', function() {
-        var tokens: Token[] = lex("a.method()");
+        var tokens: Token[] = lex('a.method()');
         expectIdentifierToken(tokens[2], 2, 'method');
       });

       it('should tokenize method invocation', function() {
-        var tokens: Token[] = lex("a.b.c (d) - e.f()");
+        var tokens: Token[] = lex('a.b.c (d) - e.f()');
         expectIdentifierToken(tokens[0], 0, 'a');
         expectCharacterToken(tokens[1], 1, '.');
         expectIdentifierToken(tokens[2], 2, 'b');

@@ -197,7 +202,7 @@ export function main() {
       });

       it('should tokenize number', function() {
-        var tokens: Token[] = lex("0.5");
+        var tokens: Token[] = lex('0.5');
         expectNumberToken(tokens[0], 0, 0.5);
       });

@@ -208,34 +213,36 @@ export function main() {
       // });

       it('should tokenize number with exponent', function() {
-        var tokens: Token[] = lex("0.5E-10");
+        var tokens: Token[] = lex('0.5E-10');
         expect(tokens.length).toEqual(1);
         expectNumberToken(tokens[0], 0, 0.5E-10);
-        tokens = lex("0.5E+10");
+        tokens = lex('0.5E+10');
         expectNumberToken(tokens[0], 0, 0.5E+10);
       });

       it('should throws exception for invalid exponent', function() {
-        expect(() => { lex("0.5E-"); })
-            .toThrowError('Lexer Error: Invalid exponent at column 4 in expression [0.5E-]');
+        expect(() => {
+          lex('0.5E-');
+        }).toThrowError('Lexer Error: Invalid exponent at column 4 in expression [0.5E-]');

-        expect(() => { lex("0.5E-A"); })
-            .toThrowError('Lexer Error: Invalid exponent at column 4 in expression [0.5E-A]');
+        expect(() => {
+          lex('0.5E-A');
+        }).toThrowError('Lexer Error: Invalid exponent at column 4 in expression [0.5E-A]');
       });

       it('should tokenize number starting with a dot', function() {
-        var tokens: Token[] = lex(".5");
+        var tokens: Token[] = lex('.5');
         expectNumberToken(tokens[0], 0, 0.5);
       });

       it('should throw error on invalid unicode', function() {
-        expect(() => { lex("'\\u1''bla'"); })
+        expect(() => { lex('\'\\u1\'\'bla\''); })
             .toThrowError(
-                "Lexer Error: Invalid unicode escape [\\u1''b] at column 2 in expression ['\\u1''bla']");
+                'Lexer Error: Invalid unicode escape [\\u1\'\'b] at column 2 in expression [\'\\u1\'\'bla\']');
       });

       it('should tokenize hash as operator', function() {
-        var tokens: Token[] = lex("#");
+        var tokens: Token[] = lex('#');
         expectOperatorToken(tokens[0], 0, '#');
       });