Compare commits: 2.4.0...4.0.0-rc.6 (882 commits)

SHA1 | Author | Date | |
---|---|---|---|
ee03418b10 | |||
da700d1842 | |||
de87c47dd9 | |||
1060805a1f | |||
08d86751b9 | |||
9319b5f329 | |||
26f6bd4d3b | |||
edb2571a59 | |||
98cb974796 | |||
8b414222aa | |||
ea49a95bd9 | |||
a50d79df47 | |||
64285a2171 | |||
941f194a83 | |||
8b4edcc7ad | |||
c58499786c | |||
1bcbcfd56f | |||
90d2518d9a | |||
7354949763 | |||
5efc86069f | |||
97149f9424 | |||
bac265fdc2 | |||
e59e5e24b9 | |||
9e5d4781cb | |||
fc1f6efe0d | |||
c9710d4fb5 | |||
cf16f3b0dd | |||
a9e91115bf | |||
90f699fdcf | |||
fd7b855cfc | |||
20aab64c65 | |||
2eb027a793 | |||
b0a7bc77ee | |||
4e10faf1eb | |||
a0c6d44e18 | |||
9bc998c7a1 | |||
1a0c6d89b1 | |||
8c12374c4c | |||
45e2126273 | |||
41497b052d | |||
068cad1c1b | |||
31ef92fc36 | |||
b4081e3713 | |||
c8b4a33a7f | |||
a13ddf2e8a | |||
64beae9527 | |||
15a082c74e | |||
fbccd5cd38 | |||
1e8b132ade | |||
431eb309f3 | |||
8e6995c91e | |||
0759911431 | |||
1d7693c1e1 | |||
16e0423085 | |||
c2ffb6bfcd | |||
2489e4ba1b | |||
94da80148e | |||
764e90f9bb | |||
9bf2fb4a74 | |||
de3d2eeeba | |||
a805d00256 | |||
61135bc842 | |||
fa36ffda14 | |||
d3eda7a5b5 | |||
a755b715ed | |||
a9d5de0e56 | |||
f634c62cb3 | |||
f8c075ae27 | |||
aeb99645bb | |||
0d3e314df0 | |||
28ce68a13d | |||
80075afe8a | |||
bcc29ffdd1 | |||
0c43535ccc | |||
49829b4a4d | |||
5c5c2ae405 | |||
6e9264a79c | |||
a6fb78ee3c | |||
d0bc83ca27 | |||
b5b2fed54d | |||
4cef5dddc6 | |||
f92591054b | |||
2a0e55ffb5 | |||
9429032da1 | |||
791534f2f4 | |||
604546c287 | |||
c66437fc13 | |||
8415910375 | |||
5486e5417b | |||
2d78c8cc05 | |||
52bed7f9b3 | |||
7fb45283df | |||
b7212f5afe | |||
4e25601c4d | |||
add7829cb7 | |||
a52184bdda | |||
410aa33005 | |||
0ab49d4cec | |||
994089d36b | |||
d2fbbb44ae | |||
77fd91d615 | |||
492153a986 | |||
c0e05e6f03 | |||
73a46205a0 | |||
992aa17361 | |||
a4076c70cc | |||
52bbc9baf4 | |||
26d4ce29e8 | |||
41f61b0b5b | |||
f92d644c95 | |||
923d0c56e7 | |||
dd36d413ba | |||
013d806b79 | |||
6e98757665 | |||
313158132d | |||
6772c913c7 | |||
b11d0119ac | |||
637a489996 | |||
c2bd357825 | |||
4870f910d6 | |||
c95a3048ce | |||
0e6eb6d719 | |||
09574fc285 | |||
4347cb2119 | |||
f600d4e9e4 | |||
eaa04354d5 | |||
3f7cfde476 | |||
62d5543b01 | |||
f1b33ab7b1 | |||
029d0f25e5 | |||
3f38c6fdcd | |||
80112a9ea1 | |||
795638e18b | |||
322bf7a0d5 | |||
cd981499f9 | |||
c439742a54 | |||
a3e32fb7e1 | |||
1171f91a80 | |||
8e2c8b3e4d | |||
b00fe20afd | |||
36ce0afff6 | |||
5c0ea20bd0 | |||
49764a5bff | |||
4f7d62adac | |||
c10c060d20 | |||
5fe2d8fd80 | |||
5c34066058 | |||
50ab06e29d | |||
06fc42bc44 | |||
959a03a61f | |||
3b1956bbf2 | |||
3c15916e17 | |||
ff60c041f6 | |||
13686bb518 | |||
f093501501 | |||
80649ea03c | |||
778f7d6f33 | |||
2c5a671341 | |||
0aad270267 | |||
221899a930 | |||
060a2d11e5 | |||
abbbb4d52c | |||
ccd38dd54d | |||
cdc882bd36 | |||
1c1085b140 | |||
914797a8ff | |||
ab40fcb068 | |||
1847550ad1 | |||
6497633529 | |||
8850098ea4 | |||
eedca09d73 | |||
7b6dbf0952 | |||
e6c81d2a42 | |||
498a95148b | |||
21a18d6ceb | |||
195b863ea4 | |||
75147ff008 | |||
018e5c979b | |||
e7dab7e6c1 | |||
26efa3a25c | |||
893652a813 | |||
6559425b07 | |||
df914ef4bf | |||
4e1cf5b41a | |||
0c5f893f6e | |||
17f5f3b32c | |||
3bb59902f7 | |||
b804a488c5 | |||
cbde75e77b | |||
413e11fac2 | |||
0e2dd76c3b | |||
ff71eff157 | |||
fa1920a02b | |||
71cd2957f7 | |||
6c8638cf01 | |||
8b5c6b2732 | |||
601494734c | |||
1aebea52e1 | |||
920b3d259d | |||
fce55d87d2 | |||
53d62fa7d0 | |||
a69afeb614 | |||
5f9fb911f7 | |||
d17d4a3b54 | |||
814dc107d9 | |||
bebedfed24 | |||
2b44854885 | |||
b74ab83d2c | |||
da8ea350b2 | |||
1efd508217 | |||
58dd4673cd | |||
72563b61fb | |||
5aed1e36b8 | |||
8573e36574 | |||
3e51a19983 | |||
5ad5301a3e | |||
174d4c8ef7 | |||
4ca772eea3 | |||
093d69f0f0 | |||
b4ec80b21d | |||
a4476654aa | |||
4210d2b4b1 | |||
aa30c50144 | |||
fd34a58e13 | |||
e40f81b564 | |||
5ab2e28703 | |||
a3a7cf2090 | |||
d9d9d9de6f | |||
7e67f37fc4 | |||
d275667da0 | |||
03a5fd01c9 | |||
d28ea80db8 | |||
0e9277b4c3 | |||
2796790c7d | |||
028b274750 | |||
3ed1f64d43 | |||
96f11dad18 | |||
060d02eb82 | |||
951e653b0c | |||
37348989f0 | |||
c5644e5a0d | |||
c8d87a936b | |||
6b8413f7b3 | |||
bc831ff4a4 | |||
8a8d4fe24f | |||
9df9bdc0f5 | |||
c8ead9bcd0 | |||
115164033b | |||
794f8f4e6a | |||
ad3b44aef7 | |||
5df998d086 | |||
1cff1250ba | |||
07122f0ad9 | |||
ebd446397a | |||
55189b1b85 | |||
b017fbe48e | |||
0d6aa0caed | |||
b70c881c00 | |||
0d63e2a586 | |||
d83b7ba4c0 | |||
46d6e8d191 | |||
61ef756ef2 | |||
3529813ca0 | |||
b81693b30c | |||
5815983178 | |||
fe962f6de7 | |||
04e14589c4 | |||
d6c1ccaf14 | |||
4abd6f333c | |||
dd50922747 | |||
66cc88c8a8 | |||
b44bc9c022 | |||
b7e76cc2e1 | |||
3651d8d673 | |||
ba4b6f58d9 | |||
6cd3326b55 | |||
6bc6482765 | |||
f2adb2900d | |||
8343fb7740 | |||
84a65cf788 | |||
b0e0839075 | |||
4fe0b90948 | |||
928c5657c8 | |||
728fe472f8 | |||
32990307fe | |||
00fdcf4e58 | |||
ab0db66bf7 | |||
8757656508 | |||
01ff427685 | |||
2ebfa2ff31 | |||
b09ee424bf | |||
b8321e2f7d | |||
b8f0c3dc7b | |||
91fe3aadbc | |||
a24e652f2b | |||
207298cd3a | |||
62eafa4eec | |||
4626ca2bff | |||
38bb744008 | |||
dca83ec738 | |||
71e22b8d11 | |||
371dc4744c | |||
4767f107fb | |||
5ae4b77d8b | |||
887d32a9bf | |||
3883b736c0 | |||
c2e672cd1c | |||
93c0ab7131 | |||
e6e8123bdd | |||
49aa50886a | |||
180b705227 | |||
0f94b93c81 | |||
de57b2d9fd | |||
9560ad81b9 | |||
3168ef75da | |||
b6e6fc1724 | |||
dd499010be | |||
d2803da8b6 | |||
a6996a9cdd | |||
6bae7378b1 | |||
7a66a4115b | |||
79fc1e3959 | |||
9402df92de | |||
47bdc2b0b7 | |||
49b462e815 | |||
5753de50f0 | |||
d1182af1a4 | |||
bc9e1debf2 | |||
fc9e6b2a0a | |||
968995a4c6 | |||
5ba55b0e04 | |||
126fda2613 | |||
e58cb7ba08 | |||
5caab71f7d | |||
d2e42567a6 | |||
14d37fe052 | |||
f356041f09 | |||
77682a3397 | |||
213e210a93 | |||
c868457e21 | |||
f32f4de95c | |||
26ed2621a3 | |||
c7d1b3664b | |||
da79ad3cec | |||
7b02eae0e8 | |||
0c01d990bf | |||
2602b039b4 | |||
9186068df1 | |||
8824e39325 | |||
932a02f1c5 | |||
3c9a46c231 | |||
436a179552 | |||
5094aef8fd | |||
93ddd38107 | |||
32c2fd5c9f | |||
1f3198cb50 | |||
3e34ba01bd | |||
c3247c64a4 | |||
1282da1b14 | |||
1c08f1a6b2 | |||
dfed388139 | |||
5e9474d24c | |||
b92f52649b | |||
3f8d5ac478 | |||
bee567afad | |||
649bab8ff8 | |||
6a9251874b | |||
c208f97461 | |||
2da3844673 | |||
41da5998cd | |||
be8510356a | |||
01907bafb0 | |||
39f56fafdd | |||
234f05996c | |||
e99d721612 | |||
bf8eb41248 | |||
3f519207a4 | |||
ee747f7d0c | |||
a23634dfd0 | |||
c53621be8e | |||
4b54c0e23f | |||
ccb636c2e9 | |||
d3a98c74d6 | |||
187f7b68f2 | |||
835e18709d | |||
36b78e9502 | |||
4301dce7b0 | |||
6277f16187 | |||
e8d2743cfb | |||
ab3527c99b | |||
5049a50bf6 | |||
6b7937f112 | |||
175dbce354 | |||
e8a27447c4 | |||
9a2ea55bff | |||
a26eb4c04e | |||
801b09066b | |||
a1d4769199 | |||
88bc143431 | |||
830393d234 | |||
88755b0dae | |||
c9bfc59a21 | |||
fcc1d17ccb | |||
de795ea233 | |||
738d93caf7 | |||
bb0460b93b | |||
90226f7714 | |||
7db93310f1 | |||
32012a1ffb | |||
58ba4f0409 | |||
2ddd1c3ed2 | |||
c2d5f203a5 | |||
670f2eca00 | |||
2a191cae2d | |||
c2e0f71a78 | |||
fbe4b76f2d | |||
874243279d | |||
551fe50ebd | |||
d6a58f9f70 | |||
5f3c8441e4 | |||
78e8814103 | |||
7df6f46c1c | |||
601fd3e305 | |||
cdf99cf68b | |||
3517f28609 | |||
f38dbfbd64 | |||
1bdf7061b8 | |||
cc3afc888f | |||
5129e8e47c | |||
0cf753be30 | |||
c4a6263a01 | |||
45eac233eb | |||
ab26b6518d | |||
778ded9fcf | |||
b9f17a9cb2 | |||
74ce121dba | |||
96d06f7f09 | |||
8c20aaa328 | |||
b1a79fd2ec | |||
6c20e6ca2e | |||
88eb3b2ce8 | |||
0fa3895d5b | |||
ba17dcbf2b | |||
88e3d7af9f | |||
3dbd9a04d4 | |||
b36f60c74c | |||
57461e9ed7 | |||
612f120208 | |||
4a56b6e7f6 | |||
724ca373e7 | |||
3b896709a9 | |||
9a6f3d637f | |||
56f232cdd7 | |||
047cda5b3c | |||
9559d3e949 | |||
30380d010b | |||
17486fd696 | |||
0e2fd9d91a | |||
4e7752a12a | |||
e9ba7aa4f8 | |||
bb9c7ae6e7 | |||
b4d444a0a7 | |||
2f2b65bd38 | |||
269cf42b72 | |||
8b81bb1eb6 | |||
e4e9dbe33d | |||
1dc9be4b7d | |||
221b7a1176 | |||
d3f174a57f | |||
8dd16bbe67 | |||
5279d06e88 | |||
adc54302cb | |||
0f161ce27a | |||
563334e2c9 | |||
b565301186 | |||
1ece7366c8 | |||
7ac38aa357 | |||
53cf2ec573 | |||
1cfbefebe3 | |||
e5a144d902 | |||
2c6dab970b | |||
9e28568a8f | |||
db700dfc71 | |||
294c1cd7a7 | |||
fafee5a493 | |||
03e855ae8f | |||
96073e51c3 | |||
baa654a234 | |||
dfe29934b6 | |||
b988733553 | |||
44bb337acc | |||
56b3b3cbed | |||
0dcac966b4 | |||
b9d293af03 | |||
4da7925ad5 | |||
6b9aa2ca3d | |||
a696f4aade | |||
bb4db2d8f3 | |||
4676df5833 | |||
45cc444154 | |||
56e2f84fe8 | |||
600402d440 | |||
5e7a2fa854 | |||
881dce841f | |||
c4817988ca | |||
b64946b5f9 | |||
09b4bd0dfb | |||
c871af7b5a | |||
d1feb478a2 | |||
c211ef9b2d | |||
a7688d27f2 | |||
24af51a623 | |||
f6b5965a63 | |||
7a4c25535d | |||
ef32e6b0d0 | |||
5c431cee02 | |||
2ada3187a6 | |||
c33fda2607 | |||
3c2842be96 | |||
94312f0980 | |||
5bccff0d7a | |||
2e1413016e | |||
a378aab9aa | |||
1e3dd3dd9b | |||
701074cf89 | |||
e9a89c0693 | |||
d0366542fb | |||
e58d683931 | |||
7036e04ec6 | |||
d4ffa47ea6 | |||
80b66edfa7 | |||
12f03b90fd | |||
470997ebb9 | |||
bcba0332a6 | |||
41db177d0c | |||
e4b76a493f | |||
a2a290a83c | |||
14d7844b2b | |||
388afa414e | |||
4370049cea | |||
fb91b2fe78 | |||
0ba5bebf61 | |||
a9096437fd | |||
31d42d87c6 | |||
20a7e26d1e | |||
6e2c9cb586 | |||
559cf9d192 | |||
1961332f26 | |||
f9b929f28d | |||
69a4bb0bcd | |||
a7479f657a | |||
bc20e8ac9d | |||
a05e50fda3 | |||
ae7f5f37d2 | |||
45e1e36477 | |||
08ff67ea11 | |||
1bc5368ea0 | |||
093cc04748 | |||
9d2c71269b | |||
80d3e14ce4 | |||
ef48ee0a0a | |||
ec8e68ed56 | |||
0a29574d98 | |||
86b2b2504f | |||
69e14b500b | |||
1079b9381c | |||
7670cc1a86 | |||
ea63676970 | |||
1367cd9569 | |||
9aafdc7b02 | |||
1f90f29369 | |||
49fce37013 | |||
676081fe66 | |||
e0e5e78835 | |||
b4214d60a6 | |||
8270bec343 | |||
5921c872b6 | |||
52b21275f4 | |||
c48dd76f5c | |||
49fb8143e8 | |||
5f2b3173d7 | |||
c87c3bec93 | |||
2ffa1a71aa | |||
94f84c5d7e | |||
ff290af38c | |||
fe441186e7 | |||
f89d004c51 | |||
6c7300c7de | |||
22058298d3 | |||
104c157ef6 | |||
1df9319af1 | |||
d43c573ebc | |||
a699a448fb | |||
7b7ae5fe56 | |||
94b62c963d | |||
579567ca79 | |||
47d41d492b | |||
e075b1ba83 | |||
029f558d45 | |||
c5ea03a023 | |||
c7245189e2 | |||
cd3901f774 | |||
a64c9b5d5b | |||
863285a4b0 | |||
5f40e5ba21 | |||
d69717cf79 | |||
00979838ef | |||
a277e97dd7 | |||
9e5617e41e | |||
bc1320d926 | |||
77008e35ff | |||
01da4223d4 | |||
0854a5dea4 | |||
df7f5fc550 | |||
24ea3f022b | |||
3368f29a4d | |||
8960d4990c | |||
4165fddfc4 | |||
c37af2af5a | |||
da41a954b5 | |||
5a997ef4f0 | |||
d339d8b81d | |||
827c3fe199 | |||
8775ab9495 | |||
5885c52c1f | |||
80b9570dca | |||
f802194c18 | |||
7ad616a177 | |||
670b680b0a | |||
f7fba74c58 | |||
20b454cbc9 | |||
665dde2e5c | |||
4d5a4d89cd | |||
e130bc171f | |||
b141a227fb | |||
b7763559cd | |||
d1d0ce7613 | |||
2dd9654004 | |||
e35c25d2ce | |||
1e729d7ba2 | |||
fc8694ed11 | |||
05b2b49711 | |||
3ef73c2b19 | |||
4106d18172 | |||
1ce7fd7827 | |||
c4ecaeda64 | |||
b28f01bb7f | |||
c4e7c083e2 | |||
28bdc5af47 | |||
b88714bcdf | |||
d2859cdd71 | |||
4931a615bf | |||
a733444d0e | |||
6152eb24bc | |||
b2f9d56577 | |||
1c24271daf | |||
c3e5ddbe20 | |||
d02eab498f | |||
fc550185fc | |||
0c7726dd74 | |||
83361d811d | |||
1f54040ef4 | |||
65417374f1 | |||
0adb97bffb | |||
f20d1a8af5 | |||
e21e9c5fb7 | |||
d3a3a8e1fc | |||
dff6ee3272 | |||
ba52b2e08c | |||
0589f93e41 | |||
2f87eb52fe | |||
9d8c467cb0 | |||
67dc0912c5 | |||
b049217437 | |||
2191f44025 | |||
4b854be29e | |||
0a724208b9 | |||
1200cf25f4 | |||
635bf02b02 | |||
2d7b3a86cc | |||
523fd84d22 | |||
e8ea741039 | |||
1a92e3d406 | |||
be6c95ad03 | |||
f816319e41 | |||
5047d9780d | |||
b1e3dda5cb | |||
d169c2434e | |||
6d1f1a43bb | |||
e19bf70b47 | |||
a6f8e9fc90 | |||
d6382bfa0b | |||
4dea347101 | |||
5237b1c98c | |||
f364557629 | |||
c2aa981dd6 | |||
dc63cef10a | |||
2c294d5dff | |||
e1af25d93e | |||
123943a6e0 | |||
3a4b54daa4 | |||
95cbca20a5 | |||
aeed7373af | |||
2e3ac70e0a | |||
9aeb8c5357 | |||
424e6c4cb9 | |||
5cb2008e6c | |||
78f42c7aa1 | |||
d4d3782d45 | |||
46cb04d575 | |||
8c7e93bebe | |||
5d9cbd7d6f | |||
d061adc02d | |||
6d29faefea | |||
99aa49ab6c | |||
e5c6bb4286 | |||
d9a22dae4f | |||
fb6c4582a1 | |||
8578682dcf | |||
c0178de0e2 | |||
31322e73b7 | |||
9211a22039 | |||
3f67ab074a | |||
4bae4b3bb5 | |||
02dd90faed | |||
1c85e99588 | |||
ccb65893bf | |||
3e90ffd293 | |||
8063b0d9a2 | |||
21030e9a1c | |||
889b48d85f | |||
1bd04e95de | |||
f88cd2f22e | |||
f822f9599c | |||
9898d8f6d9 | |||
2dd6280ab8 | |||
35f9a1c2cb | |||
465516b905 | |||
db49d422f2 | |||
8ed92d75b0 | |||
50e5cb15dd | |||
c5c53f3666 | |||
bb0d23f82b | |||
1e6440e81b | |||
6b02b80a03 | |||
2c0c86e3ce | |||
5b4bea24de | |||
7690d02133 | |||
b2ae7b607e | |||
7c210645a3 | |||
07e0fce8fc | |||
0ac8e102de | |||
e74d8aaf92 | |||
881eb894bc | |||
0eca960494 | |||
eed83443b8 | |||
e5c4e5801f | |||
69fa3bbc03 | |||
445ed43b9a | |||
174334dec3 | |||
9c697030e6 | |||
697690349f | |||
0448e80704 | |||
e85232afd2 | |||
e7ece6c8ce | |||
67380d4b28 | |||
f114e40212 | |||
952471e25d | |||
c65e428778 | |||
842f52e841 | |||
eb2ceff4ba | |||
f49ab56160 | |||
c0f750af4e | |||
bcd37f52fb | |||
e69c1fb36c | |||
9da4c259a5 | |||
fcd116fdc0 | |||
383adc9ad9 | |||
9b8488f007 | |||
1817ddb57b | |||
1ee574c51e | |||
171a9bdc85 | |||
896916af29 | |||
e49c7fae22 | |||
6b65fc1286 | |||
0e3981afc1 | |||
e78508507d | |||
a23fa94ca8 | |||
4568d5ddac | |||
c6e893953f | |||
55dfa1b69d | |||
0fe3cd9a4c | |||
0c19898694 | |||
5b6e8ea3ec | |||
732f446ad2 | |||
f0e092515c | |||
14e785f5b7 | |||
01d1624884 | |||
33910ddfc9 | |||
01ca2db6ae | |||
6cefccb314 | |||
fa9e21e83c | |||
b6078f5887 | |||
c65b4fa9dc | |||
169ed82900 | |||
fd8e15b15d | |||
aa40366a92 | |||
40d8d9c3e3 | |||
ee2ac025ef | |||
aa3769ba69 | |||
d4ddb6004e | |||
84400bcc86 | |||
42d9998cbb | |||
c18d2fe5e3 | |||
d91a86aac6 | |||
d6e5e9283c | |||
eab7e490c9 | |||
3e90605db9 | |||
79671a6f12 | |||
a659259962 | |||
b56474d067 | |||
8395f0e138 | |||
dd0519abad | |||
f238c8ac7a | |||
8c27c62fab | |||
5031adc7a3 | |||
821b8f09d6 | |||
2bf1bbc071 | |||
7b0a86718c | |||
3edca4d37e | |||
a0a05041ac | |||
7256d0ede5 | |||
d62d89319e | |||
f5f1d5f65c | |||
a8d237581d | |||
d036165a19 | |||
d17e690eb4 | |||
714f2af0dd | |||
2b90cd532f | |||
3a64ad895a | |||
9ec0a4e105 | |||
4b3d135193 | |||
1d0ed6f75f | |||
6f330a5fc9 | |||
e23076f767 | |||
7295a5e7f2 | |||
20bed46737 | |||
2a5012d515 | |||
fb38fba8f9 | |||
4c35be3e07 | |||
e9f307f948 | |||
2e500cc85b | |||
56dce0e26d | |||
8a8c53250e | |||
08ff2e5249 | |||
a006c1418a | |||
90c223591f | |||
aaf6e05f56 | |||
3bee521aa4 | |||
95f48292b1 | |||
04cfa1ebdf | |||
4022173d1e | |||
c8baf51f4f | |||
b4db73d0bf | |||
e15a3f273f | |||
213c713409 | |||
9a8423da36 | |||
f0b0762f4a | |||
b5c4bf1c59 | |||
56c361ff6a | |||
562f7a2f8b | |||
6dd5201765 | |||
72361fb68f | |||
5c6ec20c7e | |||
440ef02f29 | |||
4e3d58a792 | |||
61d7c1e0b3 | |||
bf93389615 | |||
4398056146 | |||
1b547886d0 | |||
9591a08dfb | |||
65965c27a8 |
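The same commit range can be reproduced locally from the two tags shown above. A minimal sketch, assuming a local clone of angular/angular with both release tags fetched:

```shell
# List the commits between the 2.4.0 and 4.0.0-rc.6 tags, newest first, one line each
git log --oneline 2.4.0..4.0.0-rc.6

# Summarize the file-level changes between the same two tags
git diff --stat 2.4.0 4.0.0-rc.6
```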
3 .gitignore vendored
@ -17,8 +17,9 @@ modules/.vscode
# Don't check in secret files
*secret.js

# Ignore npm debug log
# Ignore npm/yarn debug log
npm-debug.log
yarn-error.log

# build-analytics
.build-analytics

256
.pullapprove.yml
Normal file
256
.pullapprove.yml
Normal file
@ -0,0 +1,256 @@
|
||||
# Configuration for pullapprove.com
|
||||
#
|
||||
# Approval access and primary role is determined by info in the project ownership spreadsheet:
|
||||
# https://docs.google.com/spreadsheets/d/1-HIlzfbPYGsPr9KuYMe6bLfc4LXzPjpoALqtYRYTZB0/edit?pli=1#gid=0&vpid=A5
|
||||
#
|
||||
# === GitHub username to Full name map ===
|
||||
#
|
||||
# alexeagle - Alex Eagle
|
||||
# alxhub - Alex Rickabaugh
|
||||
# chuckjaz - Chuck Jazdzewski
|
||||
# gkalpak - George Kalpakas
|
||||
# IgorMinar - Igor Minar
|
||||
# jasonaden - Jason Aden
|
||||
# kara - Kara Erickson
|
||||
# matsko - Matias Niemelä
|
||||
# mhevery - Misko Hevery
|
||||
# petebacondarwin - Pete Bacon Darwin
|
||||
# pkozlowski-opensource - Pawel Kozlowski
|
||||
# robwormald - Rob Wormald
|
||||
# tbosch - Tobias Bosch
|
||||
# vicb - Victor Berchet
|
||||
# vikerman - Vikram Subramanian
|
||||
# wardbell - Ward Bell
|
||||
# tinayuangao - Tina Gao
|
||||
|
||||
version: 2
|
||||
|
||||
group_defaults:
|
||||
required: 1
|
||||
reset_on_reopened:
|
||||
enabled: true
|
||||
approve_by_comment:
|
||||
enabled: false
|
||||
|
||||
groups:
|
||||
root:
|
||||
conditions:
|
||||
files:
|
||||
include:
|
||||
- "*"
|
||||
exclude:
|
||||
- "aio/*"
|
||||
- "integration/*"
|
||||
- "modules/*"
|
||||
- "packages/*"
|
||||
- "tools/*"
|
||||
users:
|
||||
- IgorMinar
|
||||
- mhevery
|
||||
|
||||
public-api:
|
||||
conditions:
|
||||
files:
|
||||
include:
|
||||
- "tools/public_api_guard/*"
|
||||
users:
|
||||
- IgorMinar
|
||||
- mhevery
|
||||
|
||||
build-and-ci:
|
||||
conditions:
|
||||
files:
|
||||
include:
|
||||
- "*.yml"
|
||||
- "*.json"
|
||||
- "*.lock"
|
||||
- "tools/*"
|
||||
exclude:
|
||||
- "tools/@angular/tsc-wrapped/*"
|
||||
- "tools/public_api_guard/*"
|
||||
- "aio/*"
|
||||
users:
|
||||
- IgorMinar #primary
|
||||
- alexeagle
|
||||
- jasonaden
|
||||
- mhevery #fallback
|
||||
|
||||
integration:
|
||||
conditions:
|
||||
files:
|
||||
- "integration/*"
|
||||
users:
|
||||
- alexeagle
|
||||
- mhevery
|
||||
- tbosch
|
||||
- vicb
|
||||
- IgorMinar #fallback
|
||||
|
||||
|
||||
core:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/core/*"
|
||||
users:
|
||||
- tbosch #primary
|
||||
- mhevery
|
||||
- vicb
|
||||
- IgorMinar #fallback
|
||||
|
||||
compiler/animations:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/compiler/src/animation/*"
|
||||
users:
|
||||
- matsko #primary
|
||||
- tbosch
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
compiler/i18n:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/compiler/src/i18n/*"
|
||||
users:
|
||||
- vicb #primary
|
||||
- tbosch
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
compiler:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/compiler/*"
|
||||
users:
|
||||
- tbosch #primary
|
||||
- vicb
|
||||
- chuckjaz
|
||||
- mhevery
|
||||
- IgorMinar #fallback
|
||||
|
||||
compiler-cli:
|
||||
conditions:
|
||||
files:
|
||||
- "tools/@angular/tsc-wrapped/*"
|
||||
- "packages/compiler-cli/*"
|
||||
users:
|
||||
- alexeagle
|
||||
- chuckjaz
|
||||
- tbosch
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
common:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/common/*"
|
||||
users:
|
||||
- pkozlowski-opensource #primary
|
||||
- vicb
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
forms:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/forms/*"
|
||||
users:
|
||||
- kara #primary
|
||||
- tinayuangao #secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
http:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/http/*"
|
||||
users:
|
||||
- vikerman #primary
|
||||
- alxhub
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
language-service:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/language-service/*"
|
||||
users:
|
||||
- chuckjaz #primary
|
||||
# needs secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
router:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/router/*"
|
||||
users:
|
||||
- vicb #primary
|
||||
# needs secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
upgrade:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/upgrade/*"
|
||||
users:
|
||||
- petebacondarwin #primary
|
||||
- gkalpak
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
platform-browser:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/platform-browser/*"
|
||||
users:
|
||||
- tbosch #primary
|
||||
- vicb #secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
platform-server:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/platform-server/*"
|
||||
users:
|
||||
- vikerman #primary
|
||||
- alxhub
|
||||
- vicb
|
||||
- tbosch
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
platform-webworker:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/platform-webworker/*"
|
||||
users:
|
||||
- vicb #primary
|
||||
- tbosch #secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
|
||||
|
||||
benchpress:
|
||||
conditions:
|
||||
files:
|
||||
- "packages/benchpress/*"
|
||||
users:
|
||||
- tbosch #primary
|
||||
# needs secondary
|
||||
- IgorMinar #fallback
|
||||
- mhevery #fallback
|
||||
|
||||
angular.io:
|
||||
conditions:
|
||||
files:
|
||||
- "aio/*"
|
||||
users:
|
||||
- IgorMinar #primary
|
||||
- petebacondarwin #secondary
|
||||
- gkalpak
|
||||
- wardbell
|
||||
- mhevery #fallback
|
54 .travis.yml
@ -1,7 +1,7 @@
language: node_js
sudo: false
node_js:
- '6.6.0'
- '6.9.5'

addons:
# firefox: "38.0"
@ -10,30 +10,44 @@ addons:
# needed to install g++ that is used by npms's native modules
- ubuntu-toolchain-r-test
packages:
# needed to install g++ that is used by npms's native modules
- g++-4.8

# https://docs.travis-ci.com/user/jwt
jwt:
# SAUCE_ACCESS_KEY<=secret for NGBUILDS_IO_KEY to work around travis-ci/travis-ci#7223, unencrypted value in valentine as NGBUILDS_IO_KEY>
# we alias NGBUILDS_IO_KEY to $SAUCE_ACCESS_KEY in env.sh and set the SAUCE_ACCESS_KEY there
- secure: "L7nrZwkAtFtYrP2DykPXgZvEKjkv0J/TwQ/r2QGxFTaBq4VZn+2Dw0YS7uCxoMqYzDwH0aAOqxoutibVpk8Z/16nE3tNmU5RzltMd6Xmt3qU2f/JDQLMo6PSlBodnjOUsDHJgmtrcbjhqrx/znA237BkNUu6UZRT7mxhXIZpn0U="
branches:
except:
- g3_v2_0
- g3

cache:
yarn: true
directories:
- ./node_modules
- ./.chrome/chromium
- ./aio/node_modules

env:
global:
# GITHUB_TOKEN_ANGULAR
# GITHUB_TOKEN_ANGULAR=<github token, a personal access token of the angular-builds account, account access in valentine>
# This is needed for the e2e Travis matrix task to publish packages to github for continuous packages delivery.
- secure: "fq/U7VDMWO8O8SnAQkdbkoSe2X92PVqg4d044HmRYVmcf6YbO48+xeGJ8yOk0pCBwl3ISO4Q2ot0x546kxfiYBuHkZetlngZxZCtQiFT9kyId8ZKcYdXaIW9OVdw3Gh3tQyUwDucfkVhqcs52D6NZjyE2aWZ4/d1V4kWRO/LMgo="
- secure: "rNqXoy2gqjbF5tBXlRBy+oiYntO3BtzcxZuEtlLMzNaTNzC4dyMOFub0GkzIPWwOzkARoEU9Kv+bC97fDVbCBUKeyzzEqxqddUKhzRxeaYjsefJ6XeTvBvDxwo7wDwyxZSuWdBeGAe4eARVHm7ypsd+AlvqxtzjyS27TK2BzdL4="
# FIREBASE_TOKEN
# This is needed for publishing builds to the "aio-staging" firebase site.
# TODO(i): the token was generated using the iminar@google account, we should switch to a shared/role-base account.
- secure: "MPx3UM77o5IlhT75PKHL0FXoB5tSXDc3vnCXCd1sRy4XUTZ9vjcV6nNuyqEf+SOw659bGbC1FI4mACGx1Q+z7MQDR85b1mcA9uSgHDkh+IR82CnCVdaX9d1RXafdJIArahxfmorbiiPPLyPIKggo7ituRm+2c+iraoCkE/pXxYg="
matrix:
# Order: a slower build first, so that we don't occupy an idle travis worker waiting for others to complete.
- CI_MODE=js
- CI_MODE=e2e
- CI_MODE=saucelabs_required
- CI_MODE=browserstack_required
- CI_MODE=saucelabs_optional
- CI_MODE=browserstack_optional
- CI_MODE=e2e
- CI_MODE=e2e_2
- CI_MODE=js
- CI_MODE=saucelabs_required
- CI_MODE=browserstack_required
- CI_MODE=saucelabs_optional
- CI_MODE=browserstack_optional
- CI_MODE=docs_test
- CI_MODE=aio

matrix:
fast_finish: true
@ -41,11 +55,19 @@ matrix:
- env: "CI_MODE=saucelabs_optional"
- env: "CI_MODE=browserstack_optional"

before_install:
# source the env.sh script so that the exported variables are available to other scripts later on
- source ./scripts/ci/env.sh print

install:
- ./scripts/ci-lite/install.sh
- ./scripts/ci/install.sh

script:
- ./scripts/ci-lite/build.sh && ./scripts/ci-lite/test.sh

after_script:
- ./scripts/ci-lite/cleanup.sh
- ./scripts/ci/build.sh
- ./scripts/ci/test.sh
# deploy is part of 'script' and not 'after_success' so that we fail the build if the deployment fails
- ./scripts/ci/deploy.sh
- ./scripts/ci/angular.sh
# all the scripts under this line will not quickly abort in case ${TRAVIS_TEST_RESULT} is 1 (job failure)
- ./scripts/ci/cleanup.sh
- ./scripts/ci/print-logs.sh

993 CHANGELOG.md
File diff suppressed because it is too large

34 COMMITTER.md
@ -1,34 +0,0 @@
# Pushing changes into the Angular 2 tree

Please see [Using git with Angular repositories](https://docs.google.com/document/d/1h8nijFSaa1jG_UE8v4WP7glh5qOUXnYtAtJh_gwOQHI/edit)
for details about how we maintain a linear commit history, and the rules for committing.

As a contributor, just read the instructions in [CONTRIBUTING.md](CONTRIBUTING.md) and send a pull request.
Someone with committer access will do the rest.

## The `PR: merge` label and `presubmit-*` branches

We have automated the process for merging pull requests into master. Our goal is to minimize the disruption for
Angular committers and also prevent breakages on master.

When a PR has `pr_state: LGTM` and is ready to merge, you should add the `pr_action: merge` label.
Currently (late 2015), we need to ensure that each PR will cleanly merge into the Google-internal version control,
so the caretaker reviews the changes manually.

After this review, the caretaker adds `zomg_admin: do_merge` which is restricted to admins only.
A robot running as [mary-poppins](https://github.com/mary-poppins)
is notified that the label was added by an authorized person,
and will create a new branch in the angular project, using the convention `presubmit-{username}-pr-{number}`.

(Note: if the automation fails, committers can instead push the commits to a branch following this naming scheme.)

When a Travis build succeeds for a presubmit branch named following the convention,
Travis will re-base the commits, merge to master, and close the PR automatically.

Finally, after merge `mary-poppins` removes the presubmit branch.

## Administration

The list of users who can trigger a merge by adding the `zomg_admin: do_merge` label is stored in our appengine app datastore.
Edit the contents of the [CoreTeamMember Table](
https://console.developers.google.com/project/angular2-automation/datastore/query?queryType=KindQuery&namespace=&kind=CoreTeamMember)

140
DEVELOPER.md
140
DEVELOPER.md
@ -1,140 +0,0 @@
|
||||
# Building and Testing Angular 2 for JS
|
||||
|
||||
This document describes how to set up your development environment to build and test Angular 2 JS version.
|
||||
It also explains the basic mechanics of using `git`, `node`, and `npm`.
|
||||
|
||||
* [Prerequisite Software](#prerequisite-software)
|
||||
* [Getting the Sources](#getting-the-sources)
|
||||
* [Installing NPM Modules](#installing-npm-modules)
|
||||
* [Building](#building)
|
||||
* [Running Tests Locally](#running-tests-locally)
|
||||
|
||||
See the [contribution guidelines](https://github.com/angular/angular/blob/master/CONTRIBUTING.md)
|
||||
if you'd like to contribute to Angular.
|
||||
|
||||
## Prerequisite Software
|
||||
|
||||
Before you can build and test Angular, you must install and configure the
|
||||
following products on your development machine:
|
||||
|
||||
* [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
|
||||
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
|
||||
Git](https://help.github.com/articles/set-up-git) is a good source of information.
|
||||
|
||||
* [Node.js](http://nodejs.org), (version `>=5.4.1 <6`) which is used to run a development web server,
|
||||
run tests, and generate distributable files. We also use Node's Package Manager, `npm`
|
||||
(version `>=3.5.3 <4.0`), which comes with Node. Depending on your system, you can install Node either from
|
||||
source or as a pre-packaged bundle.
|
||||
|
||||
* [Java Development Kit](http://www.oracle.com/technetwork/es/java/javase/downloads/index.html) which is used
|
||||
to execute the selenium standalone server for e2e testing.
|
||||
|
||||
## Getting the Sources
|
||||
|
||||
Fork and clone the Angular repository:
|
||||
|
||||
1. Login to your GitHub account or create one by following the instructions given
|
||||
[here](https://github.com/signup/free).
|
||||
2. [Fork](http://help.github.com/forking) the [main Angular
|
||||
repository](https://github.com/angular/angular).
|
||||
3. Clone your fork of the Angular repository and define an `upstream` remote pointing back to
|
||||
the Angular repository that you forked in the first place.
|
||||
|
||||
```shell
|
||||
# Clone your GitHub repository:
|
||||
git clone git@github.com:<github username>/angular.git
|
||||
|
||||
# Go to the Angular directory:
|
||||
cd angular
|
||||
|
||||
# Add the main Angular repository as an upstream remote to your repository:
|
||||
git remote add upstream https://github.com/angular/angular.git
|
||||
```
|
||||
## Installing NPM Modules
|
||||
|
||||
Next, install the JavaScript modules needed to build and test Angular:
|
||||
|
||||
```shell
|
||||
# Install Angular project dependencies (package.json)
|
||||
npm install
|
||||
```
|
||||
|
||||
**Optional**: In this document, we make use of project local `npm` package scripts and binaries
|
||||
(stored under `./node_modules/.bin`) by prefixing these command invocations with `$(npm bin)`; in
|
||||
particular `gulp` and `protractor` commands. If you prefer, you can drop this path prefix by either:
|
||||
|
||||
*Option 1*: globally installing these two packages as follows:
|
||||
|
||||
* `npm install -g gulp` (you might need to prefix this command with `sudo`)
|
||||
* `npm install -g protractor` (you might need to prefix this command with `sudo`)
|
||||
|
||||
Since global installs can become stale, and required versions can vary by project, we avoid their
|
||||
use in these instructions.
|
||||
|
||||
*Option 2*: defining a bash alias like `alias nbin='PATH=$(npm bin):$PATH'` as detailed in this
|
||||
[Stackoverflow answer](http://stackoverflow.com/questions/9679932/how-to-use-package-installed-locally-in-node-modules/15157360#15157360) and used like this: e.g., `nbin gulp build`.
|
||||
|
||||
## Windows only
|
||||
|
||||
In order to create the right symlinks, run **as administrator**:
|
||||
```shell
|
||||
./scripts/windows/create-symlinks.sh
|
||||
```
|
||||
|
||||
Before submitting a PR, do not forget to remove them:
|
||||
```shell
|
||||
./scripts/windows/remove-symlinks.sh
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
To build Angular run:
|
||||
|
||||
```shell
|
||||
./build.sh
|
||||
```
|
||||
|
||||
* Results are put in the dist folder.
|
||||
|
||||
## Running Tests Locally
|
||||
|
||||
To run tests:
|
||||
|
||||
```shell
|
||||
$ ./test.sh node # Run all angular tests on node
|
||||
|
||||
$ ./test.sh browser # Run all angular tests in browser
|
||||
$ ./test.sh browserNoRouter # Optionally run all angular tests without router in browser
|
||||
|
||||
$ ./test.sh tools # Run angular tooling (not framework) tests
|
||||
```
|
||||
|
||||
You should execute the 3 test suites before submitting a PR to github.
|
||||
|
||||
All the tests are executed on our Continuous Integration infrastructure and a PR could only be merged once the tests pass.
|
||||
|
||||
- CircleCI fails if your code is not formatted properly,
|
||||
- Travis CI fails if any of the test suites described above fails.
|
||||
|
||||
## Update the public API tests
|
||||
|
||||
If you happen to modify the public API of Angular, API golden files must be updated using:
|
||||
|
||||
``` shell
|
||||
$ gulp public-api:update
|
||||
```
|
||||
|
||||
Note: The command `./test.sh tools` fails when the API doesn't match the golden files.
|
||||
|
||||
## <a name="clang-format"></a> Formatting your source code
|
||||
|
||||
Angular uses [clang-format](http://clang.llvm.org/docs/ClangFormat.html) to format the source code. If the source code
|
||||
is not properly formatted, the CI will fail and the PR can not be merged.
|
||||
|
||||
You can automatically format your code by running:
|
||||
|
||||
``` shell
|
||||
$ gulp format
|
||||
```
|
||||
|
||||
|
2 LICENSE
@ -1,6 +1,6 @@
The MIT License

Copyright (c) 2014-2016 Google, Inc. http://angular.io
Copyright (c) 2014-2017 Google, Inc. http://angular.io

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

13 README.md
@ -5,16 +5,13 @@
[](http://issuestats.com/github/angular/angular)
[](https://badge.fury.io/js/%40angular%2Fcore)

[](https://saucelabs.com/u/angular2-ci)
[](https://saucelabs.com/u/angular2-ci)
*Safari (7+), iOS (7+), Edge (14) and IE mobile (11) are tested on [BrowserStack][browserstack].*

Angular
=========

Angular is a development platform for building mobile and desktop web applications. This is the
repository for [Angular 2][ng2] Typescript/JavaScript (JS).

Angular2 for [Dart][dart] can be found at [dart-lang/angular2][ng2dart].
Angular is a development platform for building mobile and desktop web applications using Typescript/JavaScript (JS) and other languages.


## Quickstart
@ -29,9 +26,5 @@ guidelines for [contributing][contributing] and then check out one of our issues

[browserstack]: https://www.browserstack.com/
[contributing]: http://github.com/angular/angular/blob/master/CONTRIBUTING.md
[dart]: http://www.dartlang.org
[quickstart]: https://angular.io/docs/ts/latest/quickstart.html
[ng2]: http://angular.io
[ngDart]: http://angulardart.org
[ngJS]: http://angularjs.org
[ng2dart]: https://github.com/dart-lang/angular2
[ng]: http://angular.io

73
aio/.angular-cli.json
Normal file
73
aio/.angular-cli.json
Normal file
@ -0,0 +1,73 @@
|
||||
{
|
||||
"project": {
|
||||
"version": "1.0.0-beta.32.3",
|
||||
"name": "site"
|
||||
},
|
||||
"apps": [
|
||||
{
|
||||
"root": "src",
|
||||
"outDir": "dist",
|
||||
"assets": [
|
||||
"assets",
|
||||
"content",
|
||||
"app/search/search-worker.js",
|
||||
"favicon.ico"
|
||||
],
|
||||
"index": "index.html",
|
||||
"main": "main.ts",
|
||||
"polyfills": "polyfills.ts",
|
||||
"test": "test.ts",
|
||||
"tsconfig": "tsconfig.json",
|
||||
"prefix": "aio",
|
||||
"serviceWorker": true,
|
||||
"styles": [
|
||||
"styles.scss"
|
||||
],
|
||||
"scripts": [
|
||||
|
||||
],
|
||||
"environmentSource": "environments/environment.ts",
|
||||
"environments": {
|
||||
"dev": "environments/environment.ts",
|
||||
"prod": "environments/environment.prod.ts"
|
||||
}
|
||||
}
|
||||
],
|
||||
"e2e": {
|
||||
"protractor": {
|
||||
"config": "./protractor.conf.js"
|
||||
}
|
||||
},
|
||||
"lint": [
|
||||
{
|
||||
"files": "src/**/*.ts",
|
||||
"project": "src/tsconfig.json"
|
||||
},
|
||||
{
|
||||
"files": "e2e/**/*.ts",
|
||||
"project": "e2e/tsconfig.json"
|
||||
}
|
||||
],
|
||||
"test": {
|
||||
"karma": {
|
||||
"config": "./karma.conf.js"
|
||||
}
|
||||
},
|
||||
"defaults": {
|
||||
"styleExt": "scss",
|
||||
"component": {},
|
||||
"prefixInterfaces": false,
|
||||
"inline": {
|
||||
"style": false,
|
||||
"template": false
|
||||
},
|
||||
"spec": {
|
||||
"class": false,
|
||||
"component": true,
|
||||
"directive": true,
|
||||
"module": false,
|
||||
"pipe": true,
|
||||
"service": true
|
||||
}
|
||||
}
|
||||
}
|
5 aio/.firebaserc Normal file
@ -0,0 +1,5 @@
{
  "projects": {
    "staging": "aio-staging"
  }
}

11 aio/.gitignore vendored Normal file
@ -0,0 +1,11 @@
# Ignore node_modules
node_modules

# Ignore npm/yarn debug log
npm-debug.log
yarn-error.log

# Ignore generated content
/dist
/src/content
/.sass-cache

31 aio/README.md Normal file
@ -0,0 +1,31 @@
# Site

This project was generated with [angular-cli](https://github.com/angular/angular-cli) version 1.0.0-beta.26.

## Development server
Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.

## Code scaffolding

Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive/pipe/service/class/module`.

## Build

Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--prod` flag for a production build.

## Running unit tests

Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).

## Running end-to-end tests

Run `ng e2e` to execute the end-to-end tests via [Protractor](http://www.protractortest.org/).
Before running the tests make sure you are serving the app via `ng serve`.

## Deploying to GitHub Pages

Run `ng github-pages:deploy` to deploy to GitHub Pages.

## Further help

To get more help on the `angular-cli` use `ng help` or go check out the [Angular-CLI README](https://github.com/angular/angular-cli/blob/master/README.md).

3 aio/aio-builds-setup/dockerbuild/.dockerignore Normal file
@ -0,0 +1,3 @@
scripts-js/lib
scripts-js/node_modules
scripts-js/**/test

158
aio/aio-builds-setup/dockerbuild/Dockerfile
Normal file
158
aio/aio-builds-setup/dockerbuild/Dockerfile
Normal file
@ -0,0 +1,158 @@
|
||||
# Image metadata and config
|
||||
FROM debian:jessie
|
||||
|
||||
LABEL name="angular.io PR preview" \
|
||||
description="This image implements the PR preview functionality for angular.io." \
|
||||
vendor="Angular" \
|
||||
version="1.0"
|
||||
|
||||
VOLUME /aio-secrets
|
||||
VOLUME /var/www/aio-builds
|
||||
|
||||
EXPOSE 80 443
|
||||
|
||||
|
||||
# Build-time args and env vars
|
||||
ARG AIO_BUILDS_DIR=/var/www/aio-builds
|
||||
ARG TEST_AIO_BUILDS_DIR=/tmp/aio-builds
|
||||
ARG AIO_DOMAIN_NAME=ngbuilds.io
|
||||
ARG TEST_AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME.localhost
|
||||
ARG AIO_GITHUB_ORGANIZATION=angular
|
||||
ARG TEST_AIO_GITHUB_ORGANIZATION=angular
|
||||
ARG AIO_GITHUB_TEAM_SLUGS=angular-core,aio-contributors
|
||||
ARG TEST_AIO_GITHUB_TEAM_SLUGS=angular-core,aio-contributors
|
||||
ARG AIO_NGINX_HOSTNAME=$AIO_DOMAIN_NAME
|
||||
ARG TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_DOMAIN_NAME
|
||||
ARG AIO_NGINX_PORT_HTTP=80
|
||||
ARG TEST_AIO_NGINX_PORT_HTTP=8080
|
||||
ARG AIO_NGINX_PORT_HTTPS=443
|
||||
ARG TEST_AIO_NGINX_PORT_HTTPS=4433
|
||||
ARG AIO_REPO_SLUG=angular/angular
|
||||
ARG TEST_AIO_REPO_SLUG=test-repo/test-slug
|
||||
ARG AIO_UPLOAD_HOSTNAME=upload.localhost
|
||||
ARG TEST_AIO_UPLOAD_HOSTNAME=upload.localhost
|
||||
ARG AIO_UPLOAD_MAX_SIZE=20971520
|
||||
ARG TEST_AIO_UPLOAD_MAX_SIZE=20971520
|
||||
ARG AIO_UPLOAD_PORT=3000
|
||||
ARG TEST_AIO_UPLOAD_PORT=3001
|
||||
|
||||
ENV AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
|
||||
AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
|
||||
AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
|
||||
AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
|
||||
AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
|
||||
AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
|
||||
AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
|
||||
AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
|
||||
AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
|
||||
AIO_REPO_SLUG=$AIO_REPO_SLUG TEST_AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG \
|
||||
AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
|
||||
AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
|
||||
AIO_UPLOAD_HOSTNAME=$AIO_UPLOAD_HOSTNAME TEST_AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME \
|
||||
AIO_UPLOAD_MAX_SIZE=$AIO_UPLOAD_MAX_SIZE TEST_AIO_UPLOAD_MAX_SIZE=$TEST_AIO_UPLOAD_MAX_SIZE \
|
||||
AIO_UPLOAD_PORT=$AIO_UPLOAD_PORT TEST_AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT \
|
||||
NODE_ENV=production
|
||||
|
||||
|
||||
# Create directory for logs
|
||||
RUN mkdir /var/log/aio
|
||||
|
||||
|
||||
# Add extra package sources
|
||||
RUN apt-get update -y && apt-get install -y curl
|
||||
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_6.x | bash -
|
||||
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
|
||||
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
|
||||
|
||||
|
||||
# Install packages
|
||||
RUN apt-get update -y && apt-get install -y \
|
||||
chkconfig \
|
||||
cron \
|
||||
dnsmasq \
|
||||
nano \
|
||||
nginx \
|
||||
nodejs \
|
||||
openssl \
|
||||
rsyslog \
|
||||
yarn
|
||||
RUN yarn global add pm2@2
|
||||
|
||||
|
||||
# Set up cronjobs
|
||||
COPY cronjobs/aio-builds-cleanup /etc/cron.d/
|
||||
RUN chmod 0744 /etc/cron.d/aio-builds-cleanup
|
||||
RUN crontab /etc/cron.d/aio-builds-cleanup
|
||||
RUN printenv | grep AIO_ >> /etc/environment
|
||||
|
||||
|
||||
# Set up dnsmasq
|
||||
COPY dnsmasq/dnsmasq.conf /etc/
|
||||
RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
|
||||
|
||||
# Set up SSL/TLS certificates
|
||||
COPY nginx/create-selfsigned-cert.sh /tmp/
|
||||
RUN chmod a+x /tmp/create-selfsigned-cert.sh
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-prod" "$AIO_NGINX_HOSTNAME" "$AIO_LOCALCERTS_DIR"
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-test" "$TEST_AIO_NGINX_HOSTNAME" "$TEST_AIO_LOCALCERTS_DIR"
|
||||
RUN rm /tmp/create-selfsigned-cert.sh
|
||||
RUN update-ca-certificates
|
||||
|
||||
|
||||
# Set up nginx (for production and testing)
|
||||
RUN rm /etc/nginx/sites-enabled/*
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$AIO_DOMAIN_NAME|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|g" /etc/nginx/sites-available/aio-builds-prod.conf
|
||||
RUN ln -s /etc/nginx/sites-available/aio-builds-prod.conf /etc/nginx/sites-enabled/aio-builds-prod.conf
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$TEST_AIO_DOMAIN_NAME|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|g" /etc/nginx/sites-available/aio-builds-test.conf
|
||||
RUN ln -s /etc/nginx/sites-available/aio-builds-test.conf /etc/nginx/sites-enabled/aio-builds-test.conf
|
||||
|
||||
|
||||
# Set up pm2
|
||||
RUN pm2 startup systemv -u root > /dev/null
|
||||
RUN chkconfig pm2-root on
|
||||
|
||||
|
||||
# Set up the shell scripts
|
||||
COPY scripts-sh/ $AIO_SCRIPTS_SH_DIR/
|
||||
RUN chmod a+x $AIO_SCRIPTS_SH_DIR/*
|
||||
RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \
|
||||
| while read file; do ln -s $AIO_SCRIPTS_SH_DIR/$file /usr/local/bin/aio-${file%.*}; done
|
||||
|
||||
|
||||
# Set up the Node.js scripts
|
||||
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
|
||||
WORKDIR $AIO_SCRIPTS_JS_DIR/
|
||||
RUN yarn install --production
|
||||
|
||||
|
||||
# Set up health check
|
||||
HEALTHCHECK --interval=5m CMD /usr/local/bin/aio-health-check
|
||||
|
||||
|
||||
# Go!
|
||||
WORKDIR /
|
||||
CMD aio-init && tail -f /dev/null
|
@ -0,0 +1,2 @@
# Periodically clean up builds that do not correspond to currently open PRs
0 12 * * * root /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1

16 aio/aio-builds-setup/dockerbuild/dnsmasq/dnsmasq.conf Normal file
@ -0,0 +1,16 @@
# Do not read /etc/resolv.conf. Get servers from this file instead.
no-resolv
server=8.8.8.8
server=8.8.4.4

# Listen for DHCP and DNS requests only on this address.
listen-address=127.0.0.1

# Force an IP addres for these domains.
address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$AIO_UPLOAD_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_UPLOAD_HOSTNAME}}/127.0.0.1

# Run as root (required from inside docker container).
user=root

87
aio/aio-builds-setup/dockerbuild/nginx/aio-builds.conf
Normal file
87
aio/aio-builds-setup/dockerbuild/nginx/aio-builds.conf
Normal file
@ -0,0 +1,87 @@
|
||||
# Redirect all HTTP traffic to HTTPS
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTP}} default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTP}};
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Ideally we want 308 (permanent + keep original method),
|
||||
# but it is relatively new and not supported by some clients (e.g. cURL).
|
||||
return 307 https://$host:{{$AIO_NGINX_PORT_HTTPS}}$request_uri;
|
||||
}
|
||||
|
||||
# Serve PR-preview requests
|
||||
server {
|
||||
server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{40})\.";
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
|
||||
root {{$AIO_BUILDS_DIR}}/$pr/$sha;
|
||||
disable_symlinks on from=$document_root;
|
||||
index index.html;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
location "~/[^/]+\.[^/]+$" {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html =404;
|
||||
}
|
||||
}
|
||||
|
||||
# Handle all other requests
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Health check
|
||||
location "~^/health-check/?$" {
|
||||
add_header Content-Type text/plain;
|
||||
return 200 '';
|
||||
}
|
||||
|
||||
# Upload builds
|
||||
location "~^/create-build/(?<pr>[1-9][0-9]*)/(?<sha>[0-9a-f]{40})/?$" {
|
||||
if ($request_method != "POST") {
|
||||
add_header Allow "POST";
|
||||
return 405;
|
||||
}
|
||||
|
||||
client_body_temp_path /tmp/aio-create-builds;
|
||||
client_body_buffer_size 128K;
|
||||
client_max_body_size {{$AIO_UPLOAD_MAX_SIZE}};
|
||||
client_body_in_file_only on;
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_set_header X-FILE $request_body_file;
|
||||
proxy_set_body off;
|
||||
proxy_redirect off;
|
||||
proxy_method GET;
|
||||
proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Everything else
|
||||
location / {
|
||||
return 404;
|
||||
}
|
||||
}
|
20 aio/aio-builds-setup/dockerbuild/nginx/create-selfsigned-cert.sh Executable file
@ -0,0 +1,20 @@
#!/bin/bash
set -eu -o pipefail


# Variables
confFile=/tmp/$1.conf
domainName=$2
outDir=$3


# Create certificate
cp /etc/ssl/openssl.cnf "$confFile"
echo "[subjectAltName]" >> "$confFile"
echo "subjectAltName = DNS:$domainName, DNS:*.$domainName" >> "$confFile"
mkdir -p $outDir
openssl req -days 365 -newkey rsa:2048 -nodes -sha256 -x509 \
    -config "$confFile" -extensions subjectAltName -subj "/CN=$domainName" \
    -out "$outDir/$domainName.crt" -keyout "$outDir/$domainName.key"
chmod -R 400 "$outDir"
cp "$outDir/$domainName.crt" /usr/local/share/ca-certificates

1 aio/aio-builds-setup/dockerbuild/scripts-js/.gitignore vendored Normal file
@ -0,0 +1 @@
/dist/

@ -0,0 +1,71 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
|
||||
// Classes
|
||||
export class BuildCleaner {
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string, protected repoSlug: string, protected githubToken: string) {
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
assertNotMissingOrEmpty('repoSlug', repoSlug);
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public cleanUp(): Promise<void> {
|
||||
return Promise.all([
|
||||
this.getExistingBuildNumbers(),
|
||||
this.getOpenPrNumbers(),
|
||||
]).then(([existingBuilds, openPrs]) => this.removeUnnecessaryBuilds(existingBuilds, openPrs));
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected getExistingBuildNumbers(): Promise<number[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readdir(this.buildsDir, (err, files) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
const buildNumbers = files.
|
||||
map(Number). // Convert string to number
|
||||
filter(Boolean); // Ignore NaN (or 0), because they are not builds
|
||||
|
||||
resolve(buildNumbers);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
protected getOpenPrNumbers(): Promise<number[]> {
|
||||
const githubPullRequests = new GithubPullRequests(this.githubToken, this.repoSlug);
|
||||
|
||||
return githubPullRequests.
|
||||
fetchAll('open').
|
||||
then(prs => prs.map(pr => pr.number));
|
||||
}
|
||||
|
||||
protected removeDir(dir: string) {
|
||||
try {
|
||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||
(shell as any).chmod('-R', 'a+w', dir);
|
||||
shell.rm('-rf', dir);
|
||||
} catch (err) {
|
||||
console.error(`ERROR: Unable to remove '${dir}' due to:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
protected removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]) {
|
||||
const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));
|
||||
|
||||
console.log(`Existing builds: ${existingBuildNumbers.length}`);
|
||||
console.log(`Open pull requests: ${openPrNumbers.length}`);
|
||||
console.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
|
||||
|
||||
toRemove.
|
||||
map(num => path.join(this.buildsDir, String(num))).
|
||||
forEach(dir => this.removeDir(dir));
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
// Imports
import {getEnvVar} from '../common/utils';
import {BuildCleaner} from './build-cleaner';

// Constants
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN', true);
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');

// Run
_main();

// Functions
function _main() {
  console.log(`[${new Date()}] - Cleaning up builds...`);

  const buildCleaner = new BuildCleaner(AIO_BUILDS_DIR, AIO_REPO_SLUG, AIO_GITHUB_TOKEN);

  buildCleaner.cleanUp().catch(err => {
    console.error('ERROR:', err);
    process.exit(1);
  });
}

@ -0,0 +1,110 @@
|
||||
// Imports
|
||||
import {IncomingMessage} from 'http';
|
||||
import * as https from 'https';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
// Constants
|
||||
const GITHUB_HOSTNAME = 'api.github.com';
|
||||
|
||||
// Interfaces - Types
|
||||
interface RequestParams {
|
||||
[key: string]: string | number;
|
||||
}
|
||||
|
||||
type RequestParamsOrNull = RequestParams | null;
|
||||
|
||||
// Classes
|
||||
export class GithubApi {
|
||||
protected requestHeaders: {[key: string]: string};
|
||||
|
||||
// Constructor
|
||||
constructor(githubToken: string) {
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
|
||||
this.requestHeaders = {
|
||||
'Authorization': `token ${githubToken}`,
|
||||
'User-Agent': `Node/${process.versions.node}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public get<T>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('get', path);
|
||||
}
|
||||
|
||||
public post<T>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('post', path, data);
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
|
||||
if (params == null) {
|
||||
return pathname;
|
||||
}
|
||||
|
||||
const search = (params === null) ? '' : this.serializeSearchParams(params);
|
||||
const joiner = search && '?';
|
||||
|
||||
return `${pathname}${joiner}${search}`;
|
||||
}
|
||||
|
||||
protected getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 0): Promise<T[]> {
|
||||
const perPage = 100;
|
||||
const params = {
|
||||
...baseParams,
|
||||
page: currentPage,
|
||||
per_page: perPage,
|
||||
};
|
||||
|
||||
return this.get<T[]>(pathname, params).then(items => {
|
||||
if (items.length < perPage) {
|
||||
return items;
|
||||
}
|
||||
|
||||
return this.getPaginated(pathname, baseParams, currentPage + 1).then(moreItems => [...items, ...moreItems]);
|
||||
});
|
||||
}
|
||||
|
||||
protected request<T>(method: string, path: string, data: any = null): Promise<T> {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
const options = {
|
||||
headers: {...this.requestHeaders},
|
||||
host: GITHUB_HOSTNAME,
|
||||
method,
|
||||
path,
|
||||
};
|
||||
|
||||
const onError = (statusCode: number, responseText: string) => {
|
||||
const url = `https://${GITHUB_HOSTNAME}${path}`;
|
||||
reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
|
||||
};
|
||||
const onSuccess = (responseText: string) => {
|
||||
try { resolve(JSON.parse(responseText)); } catch (err) { reject(err); }
|
||||
};
|
||||
const onResponse = (res: IncomingMessage) => {
|
||||
const statusCode = res.statusCode || -1;
|
||||
const isSuccess = (200 <= statusCode) && (statusCode < 400);
|
||||
let responseText = '';
|
||||
|
||||
res.
|
||||
on('data', d => responseText += d).
|
||||
on('end', () => isSuccess ? onSuccess(responseText) : onError(statusCode, responseText)).
|
||||
on('error', reject);
|
||||
};
|
||||
|
||||
https.
|
||||
request(options, onResponse).
|
||||
on('error', reject).
|
||||
end(data && JSON.stringify(data));
|
||||
});
|
||||
}
|
||||
|
||||
protected serializeSearchParams(params: RequestParams): string {
|
||||
return Object.keys(params).
|
||||
filter(key => params[key] != null).
|
||||
map(key => `${key}=${encodeURIComponent(String(params[key]))}`).
|
||||
join('&');
|
||||
}
|
||||
}
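To make the surface of this wrapper easier to follow, here is a minimal usage sketch. It is not part of the change itself; the token value, the `LabelsApi` subclass and the endpoints queried are purely illustrative (a subclass is shown because `getPaginated()` is `protected`).

import {GithubApi} from './github-api';

// Illustrative subclass: `getPaginated()` is protected, so callers normally go through
// a concrete wrapper such as `GithubPullRequests` below.
class LabelsApi extends GithubApi {
  public fetchLabels(repoSlug: string): Promise<Array<{name: string}>> {
    return this.getPaginated<{name: string}>(`/repos/${repoSlug}/labels`);
  }
}

const api = new LabelsApi('<github-token>');   // Placeholder token.

// `get()` resolves with the parsed JSON response or rejects with a descriptive message.
api.get<{login: string}>('/user').
  then(user => console.log(`Authenticated as ${user.login}.`)).
  catch(err => console.error(err));

// `getPaginated()` keeps fetching pages of 100 items until a short page is returned.
api.fetchLabels('angular/angular').
  then(labels => console.log(`Fetched ${labels.length} labels.`)).
  catch(err => console.error(err));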
|
@@ -0,0 +1,44 @@
|
||||
// Imports
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {GithubApi} from './github-api';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface PullRequest {
|
||||
number: number;
|
||||
user: {login: string};
|
||||
}
|
||||
|
||||
export type PullRequestState = 'all' | 'closed' | 'open';
|
||||
|
||||
// Classes
|
||||
export class GithubPullRequests extends GithubApi {
|
||||
// Constructor
|
||||
constructor(githubToken: string, protected repoSlug: string) {
|
||||
super(githubToken);
|
||||
assertNotMissingOrEmpty('repoSlug', repoSlug);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public addComment(pr: number, body: string): Promise<void> {
|
||||
if (!(pr > 0)) {
|
||||
throw new Error(`Invalid PR number: ${pr}`);
|
||||
} else if (!body) {
|
||||
throw new Error(`Invalid or empty comment body: ${body}`);
|
||||
}
|
||||
|
||||
return this.post<void>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
|
||||
}
|
||||
|
||||
public fetch(pr: number): Promise<PullRequest> {
|
||||
return this.get<PullRequest>(`/repos/${this.repoSlug}/pulls/${pr}`);
|
||||
}
|
||||
|
||||
public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
|
||||
console.log(`Fetching ${state} pull requests...`);
|
||||
|
||||
const pathname = `/repos/${this.repoSlug}/pulls`;
|
||||
const params = {state};
|
||||
|
||||
return this.getPaginated<PullRequest>(pathname, params);
|
||||
}
|
||||
}
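A brief, hedged usage sketch of this class follows; the token, repo slug and PR number are placeholders. This mirrors how the build cleaner and the upload server use it elsewhere in this change.

import {GithubPullRequests} from './github-pull-requests';

const prs = new GithubPullRequests('<github-token>', 'angular/angular');

// List the numbers of all open PRs (pagination is handled by `GithubApi.getPaginated()`).
prs.fetchAll('open').
  then(openPrs => console.log('Open PRs:', openPrs.map(pr => pr.number))).
  catch(err => console.error(err));

// Post a comment on a single PR.
prs.addComment(12345, 'The preview for this PR is ready.').
  catch(err => console.error(err));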
|
@@ -0,0 +1,45 @@
|
||||
// Imports
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {GithubApi} from './github-api';
|
||||
|
||||
// Interfaces - Types
|
||||
interface Team {
|
||||
id: number;
|
||||
slug: string;
|
||||
}
|
||||
|
||||
interface TeamMembership {
|
||||
state: string;
|
||||
}
|
||||
|
||||
// Classes
|
||||
export class GithubTeams extends GithubApi {
|
||||
// Constructor
|
||||
constructor(githubToken: string, protected organization: string) {
|
||||
super(githubToken);
|
||||
assertNotMissingOrEmpty('organization', organization);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public fetchAll(): Promise<Team[]> {
|
||||
return this.getPaginated<Team>(`/orgs/${this.organization}/teams`);
|
||||
}
|
||||
|
||||
public isMemberById(username: string, teamIds: number[]): Promise<boolean> {
|
||||
const getMembership = (teamId: number) =>
|
||||
this.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`).
|
||||
then(membership => membership.state === 'active').
|
||||
catch(() => false);
|
||||
const reduceFn = (promise: Promise<boolean>, teamId: number) =>
|
||||
promise.then(isMember => isMember || getMembership(teamId));
|
||||
|
||||
return teamIds.reduce(reduceFn, Promise.resolve(false));
|
||||
}
|
||||
|
||||
public isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
|
||||
return this.fetchAll().
|
||||
then(teams => teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id)).
|
||||
then(teamIds => this.isMemberById(username, teamIds)).
|
||||
catch(() => false);
|
||||
}
|
||||
}
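Again a small usage sketch (token, organization and team slugs are placeholders). Note that `isMemberBySlug()` deliberately resolves to `false` on any API error instead of rejecting.

import {GithubTeams} from './github-teams';

const teams = new GithubTeams('<github-token>', 'some-org');

// Resolves to `true` only if the user has an *active* membership in at least one of the teams.
teams.isMemberBySlug('some-user', ['team-a', 'team-b']).
  then(isMember => console.log(`Team member: ${isMember}`));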
|
@@ -0,0 +1,23 @@
|
||||
export const runTests = (specFiles: string[], helpers?: string[]) => {
|
||||
// We can't use `import` here, because of the following mess:
|
||||
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
|
||||
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
|
||||
//
|
||||
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
|
||||
// `jasmine-core` module and not the `jasmine` module).
|
||||
// tslint:disable-next-line: no-var-requires variable-name
|
||||
const Jasmine = require('jasmine');
|
||||
const config = {
|
||||
helpers,
|
||||
random: true,
|
||||
spec_files: specFiles,
|
||||
stopSpecOnExpectationFailure: true,
|
||||
};
|
||||
|
||||
process.on('unhandledRejection', (reason: any) => console.log('Unhandled rejection:', reason));
|
||||
|
||||
const runner = new Jasmine();
|
||||
runner.loadConfig(config);
|
||||
runner.onComplete((passed: boolean) => process.exit(passed ? 0 : 1));
|
||||
runner.execute();
|
||||
};
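For reference, a minimal entry point using this helper might look as follows. The globs are assumptions for illustration; the e2e test entry point further down in this change uses `*.e2e.js` instead.

import {runTests} from './run-tests';

// Spec and helper files are passed to Jasmine as globs (mirroring the e2e entry point below).
const specFiles = [`${__dirname}/**/*.spec.js`];
const helpers = [`${__dirname}/**/*.helper.js`];

runTests(specFiles, helpers);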
|
@@ -0,0 +1,17 @@
|
||||
// Functions
|
||||
export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
|
||||
if (!value) {
|
||||
throw new Error(`Missing or empty required parameter '${name}'!`);
|
||||
}
|
||||
};
|
||||
|
||||
export const getEnvVar = (name: string, isOptional = false): string => {
|
||||
const value = process.env[name];
|
||||
|
||||
if (!isOptional && !value) {
|
||||
console.error(`ERROR: Missing required environment variable '${name}'!`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return value || '';
|
||||
};
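A short sketch of how these helpers are meant to be used. The environment variable names are ones that appear elsewhere in this change; the values are whatever the environment provides.

import {assertNotMissingOrEmpty, getEnvVar} from './utils';

// Required variable: logs an error and exits the process if it is unset or empty.
const repoSlug = getEnvVar('AIO_REPO_SLUG');

// Optional variable: falls back to '' instead of exiting.
const githubToken = getEnvVar('AIO_GITHUB_TOKEN', true);

// Guard for required constructor arguments: throws a descriptive error when empty.
assertNotMissingOrEmpty('repoSlug', repoSlug);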
|
@@ -0,0 +1,81 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import {EventEmitter} from 'events';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {CreatedBuildEvent} from './build-events';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Classes
|
||||
export class BuildCreator extends EventEmitter {
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string) {
|
||||
super();
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public create(pr: string, sha: string, archivePath: string): Promise<any> {
|
||||
const prDir = path.join(this.buildsDir, pr);
|
||||
const shaDir = path.join(prDir, sha);
|
||||
let dirToRemoveOnError: string;
|
||||
|
||||
return Promise.
|
||||
all([this.exists(prDir), this.exists(shaDir)]).
|
||||
then(([prDirExisted, shaDirExisted]) => {
|
||||
if (shaDirExisted) {
|
||||
throw new UploadError(403, `Request to overwrite existing directory: ${shaDir}`);
|
||||
}
|
||||
|
||||
dirToRemoveOnError = prDirExisted ? shaDir : prDir;
|
||||
|
||||
return Promise.resolve().
|
||||
then(() => shell.mkdir('-p', shaDir)).
|
||||
then(() => this.extractArchive(archivePath, shaDir)).
|
||||
then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha)));
|
||||
}).
|
||||
catch(err => {
|
||||
if (dirToRemoveOnError) {
|
||||
shell.rm('-rf', dirToRemoveOnError);
|
||||
}
|
||||
|
||||
if (!(err instanceof UploadError)) {
|
||||
err = new UploadError(500, `Error while uploading to directory: ${shaDir}\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected exists(fileOrDir: string): Promise<boolean> {
|
||||
return new Promise(resolve => fs.access(fileOrDir, err => resolve(!err)));
|
||||
}
|
||||
|
||||
protected extractArchive(inputFile: string, outputDir: string): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const cmd = `tar --extract --gzip --directory "${outputDir}" --file "${inputFile}"`;
|
||||
|
||||
cp.exec(cmd, (err, _stdout, stderr) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
if (stderr) {
|
||||
console.warn(stderr);
|
||||
}
|
||||
|
||||
try {
|
||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||
(shell as any).chmod('-R', 'a-w', outputDir);
|
||||
shell.rm('-f', inputFile);
|
||||
resolve();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
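To illustrate the intended flow (the path, PR number and SHA below are placeholders): the creator extracts the uploaded archive into `<buildsDir>/<pr>/<sha>` and emits a `CreatedBuildEvent` on success, while failures surface as `UploadError`s.

import {BuildCreator} from './build-creator';
import {CreatedBuildEvent} from './build-events';

const buildCreator = new BuildCreator('/var/www/aio-builds');

// The upload server subscribes to this event to post the preview comment on GitHub.
buildCreator.on(CreatedBuildEvent.type, ({pr, sha}: CreatedBuildEvent) =>
  console.log(`Created build for PR #${pr} at ${sha}.`));

buildCreator.
  create('12345', 'f'.repeat(40), '/tmp/snapshot.tar.gz').
  catch(err => console.error(`Upload failed (status ${err.status}): ${err.message}`));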
|
@@ -0,0 +1,15 @@
|
||||
// Classes
|
||||
export class BuildEvent {
|
||||
// Constructor
|
||||
constructor(public type: string, public pr: number, public sha: string) {}
|
||||
}
|
||||
|
||||
export class CreatedBuildEvent extends BuildEvent {
|
||||
// Properties - Public, Static
|
||||
public static type = 'build.created';
|
||||
|
||||
// Constructor
|
||||
constructor(pr: number, sha: string) {
|
||||
super(CreatedBuildEvent.type, pr, sha);
|
||||
}
|
||||
}
|
@@ -0,0 +1,78 @@
|
||||
// Imports
|
||||
import * as jwt from 'jsonwebtoken';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Interfaces - Types
|
||||
interface JwtPayload {
|
||||
slug: string;
|
||||
'pull-request': number;
|
||||
}
|
||||
|
||||
// Classes
|
||||
export class BuildVerifier {
|
||||
// Properties - Protected
|
||||
protected githubPullRequests: GithubPullRequests;
|
||||
protected githubTeams: GithubTeams;
|
||||
|
||||
// Constructor
|
||||
constructor(protected secret: string, githubToken: string, protected repoSlug: string, organization: string,
|
||||
protected allowedTeamSlugs: string[]) {
|
||||
assertNotMissingOrEmpty('secret', secret);
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
assertNotMissingOrEmpty('repoSlug', repoSlug);
|
||||
assertNotMissingOrEmpty('organization', organization);
|
||||
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
|
||||
|
||||
this.githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
||||
this.githubTeams = new GithubTeams(githubToken, organization);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public getPrAuthorTeamMembership(pr: number): Promise<{author: string, isMember: boolean}> {
|
||||
return Promise.resolve().
|
||||
then(() => this.githubPullRequests.fetch(pr)).
|
||||
then(prInfo => prInfo.user.login).
|
||||
then(author => this.githubTeams.isMemberBySlug(author, this.allowedTeamSlugs).
|
||||
then(isMember => ({author, isMember})));
|
||||
}
|
||||
|
||||
public verify(expectedPr: number, authHeader: string): Promise<void> {
|
||||
return Promise.resolve().
|
||||
then(() => this.extractJwtString(authHeader)).
|
||||
then(jwtString => this.verifyJwt(expectedPr, jwtString)).
|
||||
then(jwtPayload => this.verifyPr(jwtPayload['pull-request'])).
|
||||
catch(err => { throw new UploadError(403, `Error while verifying upload for PR ${expectedPr}: ${err}`); });
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected extractJwtString(input: string): string {
|
||||
return input.replace(/^token +/i, '');
|
||||
}
|
||||
|
||||
protected verifyJwt(expectedPr: number, token: string): Promise<JwtPayload> {
|
||||
return new Promise((resolve, reject) => {
|
||||
jwt.verify(token, this.secret, {issuer: 'Travis CI, GmbH'}, (err, payload) => {
|
||||
if (err) {
|
||||
reject(err.message || err);
|
||||
} else if (payload.slug !== this.repoSlug) {
|
||||
reject(`jwt slug invalid. expected: ${this.repoSlug}`);
|
||||
} else if (payload['pull-request'] !== expectedPr) {
|
||||
reject(`jwt pull-request invalid. expected: ${expectedPr}`);
|
||||
} else {
|
||||
resolve(payload);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
protected verifyPr(pr: number): Promise<void> {
|
||||
return this.getPrAuthorTeamMembership(pr).
|
||||
then(({author, isMember}) => isMember ? Promise.resolve() : Promise.reject(
|
||||
`User '${author}' is not an active member of any of the following teams: ` +
|
||||
`${this.allowedTeamSlugs.join(', ')}`,
|
||||
));
|
||||
}
|
||||
}
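To clarify what `verify()` expects, here is a hedged sketch that mints a token the same way `verifyJwt()` checks it (same secret, same issuer, and a payload shaped like `JwtPayload`). The secret, token, slugs and PR number are placeholders; note that `verify()` also consults the GitHub API for team membership, so it performs network calls.

import * as jwt from 'jsonwebtoken';
import {BuildVerifier} from './build-verifier';

const secret = '<shared-secret>';
const verifier = new BuildVerifier(secret, '<github-token>', 'some-org/some-repo', 'some-org', ['team-a']);

// A payload with the fields `verifyJwt()` inspects, signed with the expected issuer.
const token = jwt.sign({'slug': 'some-org/some-repo', 'pull-request': 12345}, secret, {issuer: 'Travis CI, GmbH'});

// `verify()` takes the raw AUTHORIZATION header value; an optional 'Token ' prefix is stripped.
verifier.verify(12345, `Token ${token}`).
  then(() => console.log('Upload verified.')).
  catch(err => console.error(err.message));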
|
@@ -0,0 +1,39 @@
|
||||
// Imports
|
||||
import {getEnvVar} from '../common/utils';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main() {
|
||||
const secret = 'unused';
|
||||
const githubToken = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
const repoSlug = getEnvVar('AIO_REPO_SLUG');
|
||||
const organization = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const allowedTeamSlugs = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
|
||||
const pr = +getEnvVar('AIO_PREVERIFY_PR');
|
||||
|
||||
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, organization, allowedTeamSlugs);
|
||||
|
||||
// Exit codes:
|
||||
// - 0: The PR author is a member.
|
||||
// - 1: The PR author is not a member.
|
||||
// - 2: An error occurred.
|
||||
buildVerifier.getPrAuthorTeamMembership(pr).
|
||||
then(({author, isMember}) => {
|
||||
if (isMember) {
|
||||
process.exit(0);
|
||||
} else {
|
||||
const errorMessage = `User '${author}' is not an active member of any of the following teams: ` +
|
||||
`${allowedTeamSlugs.join(', ')}`;
|
||||
onError(errorMessage, 1);
|
||||
}
|
||||
}).
|
||||
catch(err => onError(err, 2));
|
||||
}
|
||||
|
||||
function onError(err: string, exitCode: number) {
|
||||
console.error(err);
|
||||
process.exit(exitCode || 1);
|
||||
}
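Since this script communicates its result purely through the documented exit codes, a caller (for example another Node script in the CI setup) might consume it roughly like this. The script path and the way the PR number is provided are assumptions for illustration only.

import {spawn} from 'child_process';

// Hypothetical invocation; in reality the path and environment are supplied by the CI scripts.
const proc = spawn('node', ['dist/upload-server/pre-verify-pr'], {
  env: {...process.env, AIO_PREVERIFY_PR: '12345'},
  stdio: 'inherit',
});

proc.on('exit', (code: number) => {
  // 0: the author is a team member, 1: not a member, 2: unexpected error.
  console.log((code === 0) ? 'PR pre-verified.' : `PR not pre-verified (exit code: ${code}).`);
});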
|
@@ -0,0 +1,10 @@
|
||||
// Imports
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
|
||||
// Run
|
||||
// TODO(gkalpak): Add e2e tests to cover these interactions as well.
|
||||
GithubPullRequests.prototype.addComment = () => Promise.resolve();
|
||||
BuildVerifier.prototype.verify = () => Promise.resolve();
|
||||
// tslint:disable-next-line: no-var-requires
|
||||
require('./index');
|
@@ -0,0 +1,35 @@
|
||||
// TODO(gkalpak): Find more suitable way to run as `www-data`.
|
||||
process.setuid('www-data');
|
||||
|
||||
// Imports
|
||||
import {getEnvVar} from '../common/utils';
|
||||
import {uploadServerFactory} from './upload-server-factory';
|
||||
|
||||
// Constants
|
||||
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
|
||||
const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
|
||||
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
|
||||
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
const AIO_PREVIEW_DEPLOYMENT_TOKEN = getEnvVar('AIO_PREVIEW_DEPLOYMENT_TOKEN');
|
||||
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
|
||||
const AIO_UPLOAD_HOSTNAME = getEnvVar('AIO_UPLOAD_HOSTNAME');
|
||||
const AIO_UPLOAD_PORT = +getEnvVar('AIO_UPLOAD_PORT');
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main() {
|
||||
uploadServerFactory.
|
||||
create({
|
||||
buildsDir: AIO_BUILDS_DIR,
|
||||
domainName: AIO_DOMAIN_NAME,
|
||||
githubOrganization: AIO_GITHUB_ORGANIZATION,
|
||||
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
|
||||
githubToken: AIO_GITHUB_TOKEN,
|
||||
repoSlug: AIO_REPO_SLUG,
|
||||
secret: AIO_PREVIEW_DEPLOYMENT_TOKEN,
|
||||
}).
|
||||
listen(AIO_UPLOAD_PORT, AIO_UPLOAD_HOSTNAME);
|
||||
}
|
@@ -0,0 +1,8 @@
|
||||
// Classes
|
||||
export class UploadError extends Error {
|
||||
// Constructor
|
||||
constructor(public status: number = 500, message?: string) {
|
||||
super(message);
|
||||
Object.setPrototypeOf(this, UploadError.prototype);
|
||||
}
|
||||
}
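The `Object.setPrototypeOf()` call is there because, when TypeScript targets ES5, subclasses of built-ins such as `Error` otherwise lose their prototype chain, and both `BuildCreator` above and the upload server below rely on `instanceof UploadError` checks. A tiny sketch of the behaviour this preserves (assuming an ES5 target):

import {UploadError} from './upload-error';

const err = new UploadError(403, 'Not allowed');

console.log(err instanceof UploadError);  // true (would be false without `setPrototypeOf()` on ES5)
console.log(err instanceof Error);        // true
console.log(err.status, err.message);     // 403 'Not allowed'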
|
@@ -0,0 +1,117 @@
|
||||
// Imports
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {BuildCreator} from './build-creator';
|
||||
import {CreatedBuildEvent} from './build-events';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Constants
|
||||
const AUTHORIZATION_HEADER = 'AUTHORIZATION';
|
||||
const X_FILE_HEADER = 'X-FILE';
|
||||
|
||||
// Interfaces - Types
|
||||
interface UploadServerConfig {
|
||||
buildsDir: string;
|
||||
domainName: string;
|
||||
githubOrganization: string;
|
||||
githubTeamSlugs: string[];
|
||||
githubToken: string;
|
||||
repoSlug: string;
|
||||
secret: string;
|
||||
}
|
||||
|
||||
// Classes
|
||||
class UploadServerFactory {
|
||||
// Methods - Public
|
||||
public create({
|
||||
buildsDir,
|
||||
domainName,
|
||||
githubOrganization,
|
||||
githubTeamSlugs,
|
||||
githubToken,
|
||||
repoSlug,
|
||||
secret,
|
||||
}: UploadServerConfig): http.Server {
|
||||
assertNotMissingOrEmpty('domainName', domainName);
|
||||
|
||||
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, githubOrganization, githubTeamSlugs);
|
||||
const buildCreator = this.createBuildCreator(buildsDir, githubToken, repoSlug, domainName);
|
||||
|
||||
const middleware = this.createMiddleware(buildVerifier, buildCreator);
|
||||
const httpServer = http.createServer(middleware);
|
||||
|
||||
httpServer.on('listening', () => {
|
||||
const info = httpServer.address();
|
||||
console.info(`Up and running (and listening on ${info.address}:${info.port})...`);
|
||||
});
|
||||
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected createBuildCreator(buildsDir: string, githubToken: string, repoSlug: string,
|
||||
domainName: string): BuildCreator {
|
||||
const buildCreator = new BuildCreator(buildsDir);
|
||||
const githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
||||
|
||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha}: CreatedBuildEvent) => {
|
||||
const body = `The angular.io preview for ${sha.slice(0, 7)} is available [here][1].\n\n` +
|
||||
`[1]: https://pr${pr}-${sha}.${domainName}/`;
|
||||
|
||||
githubPullRequests.addComment(pr, body);
|
||||
});
|
||||
|
||||
return buildCreator;
|
||||
}
|
||||
|
||||
protected createMiddleware(buildVerifier: BuildVerifier, buildCreator: BuildCreator): express.Express {
|
||||
const middleware = express();
|
||||
|
||||
middleware.get(/^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/, (req, res) => {
|
||||
const pr = req.params[0];
|
||||
const sha = req.params[1];
|
||||
const archive = req.header(X_FILE_HEADER);
|
||||
const authHeader = req.header(AUTHORIZATION_HEADER);
|
||||
|
||||
if (!authHeader) {
|
||||
this.throwRequestError(401, `Missing or empty '${AUTHORIZATION_HEADER}' header`, req);
|
||||
} else if (!archive) {
|
||||
this.throwRequestError(400, `Missing or empty '${X_FILE_HEADER}' header`, req);
|
||||
}
|
||||
|
||||
buildVerifier.
|
||||
verify(+pr, authHeader).
|
||||
then(() => buildCreator.create(pr, sha, archive)).
|
||||
then(() => res.sendStatus(201)).
|
||||
catch(err => this.respondWithError(res, err));
|
||||
});
|
||||
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
|
||||
middleware.get('*', req => this.throwRequestError(404, 'Unknown resource', req));
|
||||
middleware.all('*', req => this.throwRequestError(405, 'Unsupported method', req));
|
||||
middleware.use((err: any, _req: any, res: express.Response, _next: any) => this.respondWithError(res, err));
|
||||
|
||||
return middleware;
|
||||
}
|
||||
|
||||
protected respondWithError(res: express.Response, err: any) {
|
||||
if (!(err instanceof UploadError)) {
|
||||
err = new UploadError(500, String((err && err.message) || err));
|
||||
}
|
||||
|
||||
const statusText = http.STATUS_CODES[err.status] || '???';
|
||||
console.error(`Upload error: ${err.status} - ${statusText}`);
|
||||
console.error(err.message);
|
||||
|
||||
res.status(err.status).end(err.message);
|
||||
}
|
||||
|
||||
protected throwRequestError(status: number, error: string, req: express.Request) {
|
||||
throw new UploadError(status, `${error} in request: ${req.method} ${req.originalUrl}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Exports
|
||||
export const uploadServerFactory = new UploadServerFactory();
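For reference, the `create-build` route above only matches PR numbers without leading zeros and full 40-character lowercase hex SHAs, which is exactly what the e2e tests further down exercise. A quick illustration of the same regular expression (the sample values are arbitrary):

const createBuildRe = /^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/;

console.log(createBuildRe.test(`/create-build/9/${'0'.repeat(40)}`));   // true
console.log(createBuildRe.test(`/create-build/09/${'0'.repeat(40)}`));  // false (leading zero in the PR)
console.log(createBuildRe.test(`/create-build/9/${'0'.repeat(39)}`));   // false (SHA too short)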
|
@@ -0,0 +1,191 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as http from 'http';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {getEnvVar} from '../common/utils';
|
||||
|
||||
// Constants
|
||||
const SERVER_USER = 'www-data';
|
||||
const TEST_AIO_BUILDS_DIR = getEnvVar('TEST_AIO_BUILDS_DIR');
|
||||
const TEST_AIO_NGINX_HOSTNAME = getEnvVar('TEST_AIO_NGINX_HOSTNAME');
|
||||
const TEST_AIO_NGINX_PORT_HTTP = +getEnvVar('TEST_AIO_NGINX_PORT_HTTP');
|
||||
const TEST_AIO_NGINX_PORT_HTTPS = +getEnvVar('TEST_AIO_NGINX_PORT_HTTPS');
|
||||
const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
|
||||
const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
|
||||
const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
|
||||
|
||||
// Interfaces - Types
|
||||
export interface CmdResult { success: boolean; err: Error; stdout: string; stderr: string; }
|
||||
export interface FileSpecs { content?: string; size?: number; }
|
||||
|
||||
export type CleanUpFn = () => void;
|
||||
export type TestSuiteFactory = (scheme: string, port: number) => void;
|
||||
export type VerifyCmdResultFn = (result: CmdResult) => void;
|
||||
|
||||
// Classes
|
||||
class Helper {
|
||||
// Properties - Public
|
||||
public get buildsDir() { return TEST_AIO_BUILDS_DIR; }
|
||||
public get nginxHostname() { return TEST_AIO_NGINX_HOSTNAME; }
|
||||
public get nginxPortHttp() { return TEST_AIO_NGINX_PORT_HTTP; }
|
||||
public get nginxPortHttps() { return TEST_AIO_NGINX_PORT_HTTPS; }
|
||||
public get serverUser() { return SERVER_USER; }
|
||||
public get uploadHostname() { return TEST_AIO_UPLOAD_HOSTNAME; }
|
||||
public get uploadPort() { return TEST_AIO_UPLOAD_PORT; }
|
||||
public get uploadMaxSize() { return TEST_AIO_UPLOAD_MAX_SIZE; }
|
||||
|
||||
// Properties - Protected
|
||||
protected cleanUpFns: CleanUpFn[] = [];
|
||||
protected portPerScheme: {[scheme: string]: number} = {
|
||||
http: this.nginxPortHttp,
|
||||
https: this.nginxPortHttps,
|
||||
};
|
||||
|
||||
// Constructor
|
||||
constructor() {
|
||||
shell.mkdir('-p', this.buildsDir);
|
||||
shell.exec(`chown -R ${this.serverUser} ${this.buildsDir}`);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public cleanUp() {
|
||||
while (this.cleanUpFns.length) {
|
||||
// Clean-up fns remove themselves from the list.
|
||||
this.cleanUpFns[0]();
|
||||
}
|
||||
|
||||
if (fs.readdirSync(this.buildsDir).length) {
|
||||
throw new Error(`Directory '${this.buildsDir}' is not empty after clean-up.`);
|
||||
}
|
||||
}
|
||||
|
||||
public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
|
||||
const inputDir = path.join(this.buildsDir, 'uploaded', pr, sha);
|
||||
const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
|
||||
const cmd2 = `chown ${this.serverUser} ${archivePath}`;
|
||||
|
||||
const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true);
|
||||
shell.exec(cmd1);
|
||||
shell.exec(cmd2);
|
||||
cleanUpTemp();
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
|
||||
}
|
||||
|
||||
public createDummyBuild(pr: string, sha: string, force = false): CleanUpFn {
|
||||
const prDir = path.join(this.buildsDir, pr);
|
||||
const shaDir = path.join(prDir, sha);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
||||
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
||||
shell.exec(`chown -R ${this.serverUser} ${prDir}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
||||
}
|
||||
|
||||
public deletePrDir(pr: string) {
|
||||
const prDir = path.join(this.buildsDir, pr);
|
||||
|
||||
if (fs.existsSync(prDir)) {
|
||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||
(shell as any).chmod('-R', 'a+w', prDir);
|
||||
shell.rm('-rf', prDir);
|
||||
}
|
||||
}
|
||||
|
||||
public readBuildFile(pr: string, sha: string, relFilePath: string): string {
|
||||
const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
|
||||
return fs.readFileSync(absFilePath, 'utf8');
|
||||
}
|
||||
|
||||
public runCmd(cmd: string, opts: cp.ExecFileOptions = {}): Promise<CmdResult> {
|
||||
return new Promise(resolve => {
|
||||
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
|
||||
this.createCleanUpFn(() => proc.kill());
|
||||
});
|
||||
}
|
||||
|
||||
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory) {
|
||||
Object.keys(this.portPerScheme).forEach(scheme => suiteFactory(scheme, this.portPerScheme[scheme]));
|
||||
}
|
||||
|
||||
public verifyResponse(status: number | [number, string], regex = /^/): VerifyCmdResultFn {
|
||||
let statusCode: number;
|
||||
let statusText: string;
|
||||
|
||||
if (Array.isArray(status)) {
|
||||
statusCode = status[0];
|
||||
statusText = status[1];
|
||||
} else {
|
||||
statusCode = status;
|
||||
statusText = http.STATUS_CODES[statusCode];
|
||||
}
|
||||
|
||||
return (result: CmdResult) => {
|
||||
const [headers, body] = result.stdout.
|
||||
split(/(?:\r?\n){2,}/).
|
||||
map(s => s.trim()).
|
||||
slice(-2);
|
||||
|
||||
if (!result.success) {
|
||||
console.log('Stdout:', result.stdout);
|
||||
console.log('Stderr:', result.stderr);
|
||||
console.log('Error:', result.err);
|
||||
}
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(headers).toContain(`${statusCode} ${statusText}`);
|
||||
expect(body).toMatch(regex);
|
||||
};
|
||||
}
|
||||
|
||||
public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string): CleanUpFn {
|
||||
const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
|
||||
return this.writeFile(absFilePath, {content}, true);
|
||||
}
|
||||
|
||||
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): CleanUpFn {
|
||||
if (!force && fs.existsSync(filePath)) {
|
||||
throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
|
||||
}
|
||||
|
||||
let cleanUpTarget = filePath;
|
||||
while (!fs.existsSync(path.dirname(cleanUpTarget))) {
|
||||
cleanUpTarget = path.dirname(cleanUpTarget);
|
||||
}
|
||||
|
||||
shell.mkdir('-p', path.dirname(filePath));
|
||||
if (size) {
|
||||
// Create a file of the specified size.
|
||||
cp.execSync(`fallocate -l ${size} ${filePath}`);
|
||||
} else {
|
||||
// Create a file with the specified content.
|
||||
fs.writeFileSync(filePath, content || '');
|
||||
}
|
||||
shell.exec(`chown ${this.serverUser} ${filePath}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', cleanUpTarget));
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected createCleanUpFn(fn: Function): CleanUpFn {
|
||||
const cleanUpFn = () => {
|
||||
const idx = this.cleanUpFns.indexOf(cleanUpFn);
|
||||
if (idx !== -1) {
|
||||
this.cleanUpFns.splice(idx, 1);
|
||||
fn();
|
||||
}
|
||||
};
|
||||
|
||||
this.cleanUpFns.push(cleanUpFn);
|
||||
|
||||
return cleanUpFn;
|
||||
}
|
||||
}
|
||||
|
||||
// Exports
|
||||
export const helper = new Helper();
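One detail worth spelling out: `verifyResponse()` accepts either a bare status code, whose reason phrase is looked up in `http.STATUS_CODES`, or a `[code, reasonPhrase]` tuple for servers that use a non-standard phrase (nginx's 405 below). The returned function runs Jasmine `expect()` calls, so it is only meaningful inside a spec (as the specs below import and use it); the values here are illustrative.

import {helper as h} from './helper';

const verify404 = h.verifyResponse(404);                    // expects '404 Not Found' in the headers
const verify405 = h.verifyResponse([405, 'Not Allowed']);   // nginx's phrasing for 405
const verify200 = h.verifyResponse(200, /^PR: 9/);          // additionally asserts on the response body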
|
@@ -0,0 +1,6 @@
|
||||
// Imports
|
||||
import {runTests} from '../common/run-tests';
|
||||
|
||||
// Run
|
||||
const specFiles = [`${__dirname}/**/*.e2e.js`];
|
||||
runTests(specFiles);
|
@@ -0,0 +1,271 @@
|
||||
// Imports
|
||||
import * as path from 'path';
|
||||
import {helper as h} from './helper';
|
||||
|
||||
// Tests
|
||||
describe(`nginx`, () => {
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
it('should redirect HTTP to HTTPS', done => {
|
||||
const httpHost = `${h.nginxHostname}:${h.nginxPortHttp}`;
|
||||
const httpsHost = `${h.nginxHostname}:${h.nginxPortHttps}`;
|
||||
const urlMap = {
|
||||
[`http://${httpHost}/`]: `https://${httpsHost}/`,
|
||||
[`http://${httpHost}/foo`]: `https://${httpsHost}/foo`,
|
||||
[`http://foo.${httpHost}/`]: `https://foo.${httpsHost}/`,
|
||||
};
|
||||
|
||||
const verifyRedirection = (httpUrl: string) => h.runCmd(`curl -i ${httpUrl}`).then(result => {
|
||||
h.verifyResponse(307)(result);
|
||||
|
||||
const headers = result.stdout.split(/(?:\r?\n){2,}/)[0];
|
||||
expect(headers).toContain(`Location: ${urlMap[httpUrl]}`);
|
||||
});
|
||||
|
||||
Promise.
|
||||
all(Object.keys(urlMap).map(verifyRedirection)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`nginx (on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = h.nginxHostname;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
|
||||
|
||||
describe(`pr<pr>-<sha>.${host}/*`, () => {
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyBuild(pr, sha9);
|
||||
h.createDummyBuild(pr, sha0);
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html', done => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js', done => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /foo/bar\\.js$`);
|
||||
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 403 for directories', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/`).then(h.verifyResponse(403)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo`).then(h.verifyResponse(403)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths to files', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', done => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown PRs/SHAs', done => {
|
||||
const otherPr = 54321;
|
||||
const otherSha = '8'.repeat(40);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}9.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherSha}.${host}`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the subdomain format is wrong', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://prx${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://xx${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://p${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://r${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}_${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr0${pr}-${sha9}.${host}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const bodyRegex9 = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /index\\.html$`);
|
||||
const bodyRegex0 = new RegExp(`^PR: ${pr} \\| SHA: ${sha0} \\| File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${sha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
||||
|
||||
it('should disallow non-POST requests', done => {
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it(`should reject files larger than ${h.uploadMaxSize}B (according to header)`, done => {
|
||||
const headers = `--header "Content-Length: ${1.5 * h.uploadMaxSize}"`;
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
h.runCmd(`curl -iLX POST ${headers} ${url}`).
|
||||
then(h.verifyResponse([413, 'Request Entity Too Large'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it(`should reject files larger than ${h.uploadMaxSize}B (without header)`, done => {
|
||||
const filePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
h.writeFile(filePath, {size: 1.5 * h.uploadMaxSize});
|
||||
|
||||
h.runCmd(`curl -iLX POST --data-binary "@${filePath}" ${url}`).
|
||||
then(h.verifyResponse([413, 'Request Entity Too Large'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the upload server', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(401, /Missing or empty 'AUTHORIZATION' header/)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/0${pr}/${sha9}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}/create-build/${pr}`;
|
||||
const bodyRegex = /Missing or empty 'AUTHORIZATION' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/0${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/${sha0}`).then(h.verifyResponse(401, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
it('should respond with 404 for unknown URLs (even if the resource exists)', done => {
|
||||
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(h.buildsDir, relFilePath);
|
||||
h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
});
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
});
|
@@ -0,0 +1,84 @@
|
||||
// Imports
|
||||
import * as path from 'path';
|
||||
import {helper as h} from './helper';
|
||||
|
||||
// Tests
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = h.nginxHostname;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr9 = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
|
||||
const getFile = (pr: string, sha: string, file: string) =>
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha}.${host}/${file}`);
|
||||
const uploadBuild = (pr: string, sha: string, archive: string) => {
|
||||
const curlPost = 'curl -iLX POST --header "Authorization: Token FOO"';
|
||||
return h.runCmd(`${curlPost} --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
|
||||
};
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => {
|
||||
h.deletePrDir(pr9);
|
||||
h.cleanUp();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to upload and serve a build for a new PR', done => {
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to upload and serve a build for an existing PR', done => {
|
||||
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
||||
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
||||
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(pr9, sha0);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
|
||||
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not be able to overwrite a build', done => {
|
||||
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(pr9, sha9);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(h.verifyResponse(403)).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
}));
|
@@ -0,0 +1,266 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import {CmdResult, helper as h} from './helper';
|
||||
|
||||
// Tests
|
||||
describe('upload-server (on HTTP)', () => {
|
||||
const hostname = h.uploadHostname;
|
||||
const port = h.uploadPort;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
||||
const authorizationHeader = `--header "Authorization: Token FOO"`;
|
||||
const xFileHeader = `--header "X-File: ${h.buildsDir}/snapshot.tar.gz"`;
|
||||
const curl = `curl -iL ${authorizationHeader} ${xFileHeader}`;
|
||||
|
||||
|
||||
it('should disallow non-GET requests', done => {
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Unsupported method/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests without an \'AUTHORIZATION\' header', done => {
|
||||
const headers1 = '';
|
||||
const headers2 = '--header "AUTHORIZATION: "';
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Missing or empty 'AUTHORIZATION' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${headers1} ${url}`).then(h.verifyResponse(401, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${headers2} ${url}`).then(h.verifyResponse(401, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests without an \'X-FILE\' header', done => {
|
||||
const headers1 = authorizationHeader;
|
||||
const headers2 = `${authorizationHeader} --header "X-FILE: "`;
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Missing or empty 'X-FILE' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${headers1} ${url}`).then(h.verifyResponse(400, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${headers2} ${url}`).then(h.verifyResponse(400, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `${curl} http://${host}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`${curl} http://${host}/create-build/0${pr}/${sha9}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/0${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).then(h.verifyResponse(500)),
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha0}`).then(h.verifyResponse(500)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds', done => {
|
||||
h.createDummyBuild(pr, sha9);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain('index.html');
|
||||
|
||||
h.writeBuildFile(pr, sha9, 'index.html', 'My content');
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content');
|
||||
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(403, /^Request to overwrite existing directory/)).
|
||||
then(() => expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content')).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should delete the PR directory on error (for new PR)', done => {
|
||||
const prDir = path.join(h.buildsDir, pr);
|
||||
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(500)).
|
||||
then(() => expect(fs.existsSync(prDir)).toBe(false)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should only delete the SHA directory on error (for existing PR)', done => {
|
||||
const prDir = path.join(h.buildsDir, pr);
|
||||
const shaDir = path.join(prDir, sha9);
|
||||
|
||||
h.createDummyBuild(pr, sha0);
|
||||
|
||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(500)).
|
||||
then(() => {
|
||||
expect(fs.existsSync(shaDir)).toBe(false);
|
||||
expect(fs.existsSync(prDir)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('on successful upload', () => {
|
||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
let uploadPromise: Promise<CmdResult>;
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyArchive(pr, sha9, archivePath);
|
||||
uploadPromise = h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`);
|
||||
});
|
||||
afterEach(() => h.deletePrDir(pr));
|
||||
|
||||
|
||||
it('should respond with 201', done => {
|
||||
uploadPromise.then(h.verifyResponse(201)).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should extract the contents of the uploaded file', done => {
|
||||
uploadPromise.
|
||||
then(() => {
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain(`uploaded/${pr}`);
|
||||
expect(h.readBuildFile(pr, sha9, 'foo/bar.js')).toContain(`uploaded/${pr}`);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it(`should create files/directories owned by '${h.serverUser}'`, done => {
|
||||
const shaDir = path.join(h.buildsDir, pr, sha9);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
uploadPromise.
|
||||
then(() => Promise.all([
|
||||
h.runCmd(`find ${shaDir}`),
|
||||
h.runCmd(`find ${shaDir} -user ${h.serverUser}`),
|
||||
])).
|
||||
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
|
||||
expect(userFiles).toBe(allFiles);
|
||||
expect(userFiles).toContain(shaDir);
|
||||
expect(userFiles).toContain(idxPath);
|
||||
expect(userFiles).toContain(barPath);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should delete the uploaded file', done => {
|
||||
expect(fs.existsSync(archivePath)).toBe(true);
|
||||
uploadPromise.
|
||||
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should make the build directory non-writable', done => {
|
||||
const shaDir = path.join(h.buildsDir, pr, sha9);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||
const isNotWritable = (fileOrDir: string) => {
|
||||
const mode = fs.statSync(fileOrDir).mode;
|
||||
// tslint:disable-next-line: no-bitwise
|
||||
return !(mode & parseInt('222', 8));
|
||||
};
|
||||
|
||||
uploadPromise.
|
||||
then(() => {
|
||||
expect(isNotWritable(shaDir)).toBe(true);
|
||||
expect(isNotWritable(idxPath)).toBe(true);
|
||||
expect(isNotWritable(barPath)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL http://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL http://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL http://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
it('should respond with 404 for GET requests to unknown URLs', done => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 405 for non-GET requests to any URL', done => {
|
||||
const bodyRegex = /^Unsupported method/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
43
aio/aio-builds-setup/dockerbuild/scripts-js/package.json
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "aio-scripts-js",
|
||||
"version": "1.0.0",
|
||||
"description": "Performing various tasks on PR build artifacts for angular.io.",
|
||||
"repository": "https://github.com/angular/angular.git",
|
||||
"author": "Angular",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"prebuild": "yarn run clean",
|
||||
"build": "tsc",
|
||||
"build-watch": "yarn run tsc -- --watch",
|
||||
"clean": "node --eval \"require('shelljs').rm('-rf', 'dist')\"",
|
||||
"dev": "concurrently --kill-others --raw --success first \"yarn run build-watch\" \"yarn run test-watch\"",
|
||||
"lint": "tslint --project tsconfig.json",
|
||||
"pre~~test-only": "yarn run lint",
|
||||
"~~test-only": "node dist/test",
|
||||
"pretest": "yarn run build",
|
||||
"test": "yarn run ~~test-only",
|
||||
"pretest-watch": "yarn run build",
|
||||
"test-watch": "nodemon --exec \"yarn run ~~test-only\" --watch dist"
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "^4.14.1",
|
||||
"jasmine": "^2.5.3",
|
||||
"jsonwebtoken": "^7.3.0",
|
||||
"shelljs": "^0.7.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^4.0.35",
|
||||
"@types/jasmine": "^2.5.43",
|
||||
"@types/jsonwebtoken": "^7.2.0",
|
||||
"@types/node": "^7.0.5",
|
||||
"@types/shelljs": "^0.7.0",
|
||||
"@types/supertest": "^2.0.0",
|
||||
"concurrently": "^3.3.0",
|
||||
"eslint": "^3.15.0",
|
||||
"eslint-plugin-jasmine": "^2.2.0",
|
||||
"nodemon": "^1.11.0",
|
||||
"supertest": "^3.0.0",
|
||||
"tslint": "^4.4.2",
|
||||
"typescript": "^2.1.6"
|
||||
}
|
||||
}
|
@@ -0,0 +1,318 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as shell from 'shelljs';
|
||||
import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
|
||||
// Tests
|
||||
describe('BuildCleaner', () => {
|
||||
let cleaner: BuildCleaner;
|
||||
|
||||
beforeEach(() => cleaner = new BuildCleaner('/foo/bar', 'baz/qux', '12345'));
|
||||
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'buildsDir\' is empty', () => {
|
||||
expect(() => new BuildCleaner('', '/baz/qux', '12345')).
|
||||
toThrowError('Missing or empty required parameter \'buildsDir\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'repoSlug\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', '', '12345')).
|
||||
toThrowError('Missing or empty required parameter \'repoSlug\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubToken\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', 'baz/qux', '')).
|
||||
toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('cleanUp()', () => {
|
||||
let cleanerGetExistingBuildNumbersSpy: jasmine.Spy;
|
||||
let cleanerGetOpenPrNumbersSpy: jasmine.Spy;
|
||||
let cleanerRemoveUnnecessaryBuildsSpy: jasmine.Spy;
|
||||
let existingBuildsDeferred: {resolve: Function, reject: Function};
|
||||
let openPrsDeferred: {resolve: Function, reject: Function};
|
||||
let promise: Promise<void>;
|
||||
|
||||
beforeEach(() => {
|
||||
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner as any, 'getExistingBuildNumbers').and.callFake(() => {
|
||||
return new Promise((resolve, reject) => existingBuildsDeferred = {resolve, reject});
|
||||
});
|
||||
cleanerGetOpenPrNumbersSpy = spyOn(cleaner as any, 'getOpenPrNumbers').and.callFake(() => {
|
||||
return new Promise((resolve, reject) => openPrsDeferred = {resolve, reject});
|
||||
});
|
||||
cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner as any, 'removeUnnecessaryBuilds');
|
||||
|
||||
promise = cleaner.cleanUp();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should get the existing builds', () => {
|
||||
expect(cleanerGetExistingBuildNumbersSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should get the open PRs', () => {
|
||||
expect(cleanerGetOpenPrNumbersSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getExistingBuildNumbers()\' rejects', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
existingBuildsDeferred.reject('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getOpenPrNumbers()\' rejects', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
openPrsDeferred.reject('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'removeUnnecessaryBuilds()\' rejects', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.reject('Test'));
|
||||
existingBuildsDeferred.resolve();
|
||||
openPrsDeferred.resolve();
|
||||
});
|
||||
|
||||
|
||||
it('should pass existing builds and open PRs to \'removeUnnecessaryBuilds()\'', done => {
|
||||
promise.then(() => {
|
||||
expect(cleanerRemoveUnnecessaryBuildsSpy).toHaveBeenCalledWith('foo', 'bar');
|
||||
done();
|
||||
});
|
||||
|
||||
existingBuildsDeferred.resolve('foo');
|
||||
openPrsDeferred.resolve('bar');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the value returned by \'removeUnnecessaryBuilds()\'', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.resolve('Test'));
|
||||
existingBuildsDeferred.resolve();
|
||||
openPrsDeferred.resolve();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
// Protected methods
|
||||
|
||||
describe('getExistingBuildNumbers()', () => {
|
||||
let fsReaddirSpy: jasmine.Spy;
|
||||
let readdirCb: (err: any, files?: string[]) => void;
|
||||
let promise: Promise<number[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
|
||||
promise = (cleaner as any).getExistingBuildNumbers();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should get the contents of the builds directory', () => {
|
||||
expect(fsReaddirSpy).toHaveBeenCalled();
|
||||
expect(fsReaddirSpy.calls.argsFor(0)[0]).toBe('/foo/bar');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while getting the files', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned files (as numbers)', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual([12, 34, 56]);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, ['12', '34', '56']);
|
||||
});
|
||||
|
||||
|
||||
it('should ignore files with non-numeric (or zero) names', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual([12, 34, 56]);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, ['12', 'foo', '34', 'bar', '56', '000']);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getOpenPrNumbers()', () => {
|
||||
let prDeferred: {resolve: Function, reject: Function};
|
||||
let promise: Promise<number[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
spyOn(GithubPullRequests.prototype, 'fetchAll').and.callFake(() => {
|
||||
return new Promise((resolve, reject) => prDeferred = {resolve, reject});
|
||||
});
|
||||
|
||||
promise = (cleaner as any).getOpenPrNumbers();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should fetch open PRs via \'GithubPullRequests\'', () => {
|
||||
expect(GithubPullRequests.prototype.fetchAll).toHaveBeenCalledWith('open');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while fetching PRs', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
prDeferred.reject('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the numbers of the fetched PRs', done => {
|
||||
promise.then(prNumbers => {
|
||||
expect(prNumbers).toEqual([1, 2, 3]);
|
||||
done();
|
||||
});
|
||||
|
||||
prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('removeDir()', () => {
|
||||
let shellChmodSpy: jasmine.Spy;
|
||||
let shellRmSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
shellChmodSpy = spyOn(shell, 'chmod');
|
||||
shellRmSpy = spyOn(shell, 'rm');
|
||||
});
|
||||
|
||||
|
||||
it('should remove the specified directory and its content', () => {
|
||||
(cleaner as any).removeDir('/foo/bar');
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', '/foo/bar');
|
||||
});
|
||||
|
||||
|
||||
it('should make the directory and its content writable before removing', () => {
|
||||
shellRmSpy.and.callFake(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar'));
|
||||
(cleaner as any).removeDir('/foo/bar');
|
||||
|
||||
expect(shellRmSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should catch errors and log them', () => {
|
||||
const consoleErrorSpy = spyOn(console, 'error');
|
||||
shellRmSpy.and.callFake(() => { throw 'Test'; });
|
||||
|
||||
(cleaner as any).removeDir('/foo/bar');
|
||||
|
||||
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain('Unable to remove \'/foo/bar\'');
|
||||
expect(consoleErrorSpy.calls.argsFor(0)[1]).toBe('Test');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('removeUnnecessaryBuilds()', () => {
|
||||
let consoleLogSpy: jasmine.Spy;
|
||||
let cleanerRemoveDirSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleLogSpy = spyOn(console, 'log');
|
||||
cleanerRemoveDirSpy = spyOn(cleaner as any, 'removeDir');
|
||||
});
|
||||
|
||||
|
||||
it('should log the number of existing builds, open PRs and builds to be removed', () => {
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], [3, 4, 5, 6]);
|
||||
|
||||
expect(console.log).toHaveBeenCalledWith('Existing builds: 3');
|
||||
expect(console.log).toHaveBeenCalledWith('Open pull requests: 4');
|
||||
expect(console.log).toHaveBeenCalledWith('Removing 2 build(s): 1, 2');
|
||||
});
|
||||
|
||||
|
||||
it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []);
|
||||
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
||||
});
|
||||
|
||||
|
||||
it('should remove the builds that do not correspond to open PRs', () => {
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(2);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
||||
cleanerRemoveDirSpy.calls.reset();
|
||||
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0);
|
||||
cleanerRemoveDirSpy.calls.reset();
|
||||
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/4');
|
||||
cleanerRemoveDirSpy.calls.reset();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,410 @@
|
||||
// Imports
|
||||
import {EventEmitter} from 'events';
|
||||
import {ClientRequest, IncomingMessage} from 'http';
|
||||
import * as https from 'https';
|
||||
import {GithubApi} from '../../lib/common/github-api';
|
||||
|
||||
// Tests
|
||||
describe('GithubApi', () => {
|
||||
let api: GithubApi;
|
||||
|
||||
beforeEach(() => api = new GithubApi('12345'));
|
||||
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'githubToken\' is missing or empty', () => {
|
||||
expect(() => new GithubApi('')).toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('get()', () => {
|
||||
let apiBuildPathSpy: jasmine.Spy;
|
||||
let apiRequestSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
apiBuildPathSpy = spyOn(api as any, 'buildPath');
|
||||
apiRequestSpy = spyOn(api as any, 'request');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'buildPath()\' with the pathname and params', () => {
|
||||
api.get('/foo', {bar: 'baz'});
|
||||
|
||||
expect(apiBuildPathSpy).toHaveBeenCalled();
|
||||
expect(apiBuildPathSpy.calls.argsFor(0)).toEqual(['/foo', {bar: 'baz'}]);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'request()\' with the correct method', () => {
|
||||
api.get('/foo');
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[0]).toBe('get');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'request()\' with the correct path', () => {
|
||||
apiBuildPathSpy.and.returnValue('/foo/bar');
|
||||
api.get('foo');
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[1]).toBe('/foo/bar');
|
||||
});
|
||||
|
||||
|
||||
it('should not pass data to \'request()\'', () => {
|
||||
(api.get as Function)('foo', {}, {});
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[2]).toBeUndefined();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('post()', () => {
|
||||
let apiBuildPathSpy: jasmine.Spy;
|
||||
let apiRequestSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
apiBuildPathSpy = spyOn(api as any, 'buildPath');
|
||||
apiRequestSpy = spyOn(api as any, 'request');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'buildPath()\' with the pathname and params', () => {
|
||||
api.post('/foo', {bar: 'baz'});
|
||||
|
||||
expect(apiBuildPathSpy).toHaveBeenCalled();
|
||||
expect(apiBuildPathSpy.calls.argsFor(0)).toEqual(['/foo', {bar: 'baz'}]);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'request()\' with the correct method', () => {
|
||||
api.post('/foo');
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[0]).toBe('post');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'request()\' with the correct path', () => {
|
||||
apiBuildPathSpy.and.returnValue('/foo/bar');
|
||||
api.post('/foo');
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[1]).toBe('/foo/bar');
|
||||
});
|
||||
|
||||
|
||||
it('should pass the data to \'request()\'', () => {
|
||||
api.post('/foo', {}, {bar: 'baz'});
|
||||
|
||||
expect(apiRequestSpy).toHaveBeenCalled();
|
||||
expect(apiRequestSpy.calls.argsFor(0)[2]).toEqual({bar: 'baz'});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
// Protected methods
|
||||
|
||||
describe('buildPath()', () => {
|
||||
|
||||
it('should return the pathname if no params', () => {
|
||||
expect((api as any).buildPath('/foo')).toBe('/foo');
|
||||
expect((api as any).buildPath('/foo', undefined)).toBe('/foo');
|
||||
expect((api as any).buildPath('/foo', null)).toBe('/foo');
|
||||
});
|
||||
|
||||
|
||||
it('should append the params to the pathname', () => {
|
||||
expect((api as any).buildPath('/foo', {bar: 'baz'})).toBe('/foo?bar=baz');
|
||||
});
|
||||
|
||||
|
||||
it('should join the params with \'&\'', () => {
|
||||
expect((api as any).buildPath('/foo', {bar: 1, baz: 2})).toBe('/foo?bar=1&baz=2');
|
||||
});
|
||||
|
||||
|
||||
it('should ignore undefined/null params', () => {
|
||||
expect((api as any).buildPath('/foo', {bar: undefined, baz: null})).toBe('/foo');
|
||||
});
|
||||
|
||||
|
||||
it('should encode param values as URI components', () => {
|
||||
expect((api as any).buildPath('/foo', {bar: 'b a&z'})).toBe('/foo?bar=b%20a%26z');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getPaginated()', () => {
|
||||
let deferreds: {resolve: Function, reject: Function}[];
|
||||
|
||||
beforeEach(() => {
|
||||
deferreds = [];
|
||||
spyOn(api, 'get').and.callFake(() => new Promise((resolve, reject) => deferreds.push({resolve, reject})));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((api as any).getPaginated()).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should call \'get()\' with the correct pathname and params', () => {
|
||||
(api as any).getPaginated('/foo/bar');
|
||||
(api as any).getPaginated('/foo/bar', {baz: 'qux'});
|
||||
|
||||
expect(api.get).toHaveBeenCalledWith('/foo/bar', {page: 0, per_page: 100});
|
||||
expect(api.get).toHaveBeenCalledWith('/foo/bar', {baz: 'qux', page: 0, per_page: 100});
|
||||
});
|
||||
|
||||
|
||||
it('should reject if the request fails', done => {
|
||||
(api as any).getPaginated('/foo/bar').catch((err: any) => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].reject('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned items', done => {
|
||||
const items = [{id: 1}, {id: 2}];
|
||||
|
||||
(api as any).getPaginated('/foo/bar').then((data: any) => {
|
||||
expect(data).toEqual(items);
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].resolve(items);
|
||||
});
|
||||
|
||||
|
||||
it('should iteratively call \'get()\' to fetch all items', done => {
|
||||
// Create an array or 250 objects.
|
||||
const allItems = '.'.repeat(250).split('').map((_, i) => ({id: i}));
|
||||
const apiGetSpy = api.get as jasmine.Spy;
|
||||
|
||||
(api as any).getPaginated('/foo/bar', {baz: 'qux'}).then((data: any) => {
|
||||
const paramsForPage = (page: number) => ({baz: 'qux', page, per_page: 100});
|
||||
|
||||
expect(apiGetSpy).toHaveBeenCalledTimes(3);
|
||||
expect(apiGetSpy.calls.argsFor(0)).toEqual(['/foo/bar', paramsForPage(0)]);
|
||||
expect(apiGetSpy.calls.argsFor(1)).toEqual(['/foo/bar', paramsForPage(1)]);
|
||||
expect(apiGetSpy.calls.argsFor(2)).toEqual(['/foo/bar', paramsForPage(2)]);
|
||||
|
||||
expect(data).toEqual(allItems);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].resolve(allItems.slice(0, 100));
|
||||
setTimeout(() => {
|
||||
deferreds[1].resolve(allItems.slice(100, 200));
|
||||
setTimeout(() => {
|
||||
deferreds[2].resolve(allItems.slice(200));
|
||||
}, 0);
|
||||
}, 0);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('request()', () => {
|
||||
let httpsRequestSpy: jasmine.Spy;
|
||||
let latestRequest: ClientRequest;
|
||||
|
||||
beforeEach(() => {
|
||||
const originalRequest = https.request;
|
||||
|
||||
httpsRequestSpy = spyOn(https, 'request').and.callFake((...args: any[]) => {
|
||||
latestRequest = originalRequest.apply(https, args);
|
||||
|
||||
spyOn(latestRequest, 'on').and.callThrough();
|
||||
spyOn(latestRequest, 'end');
|
||||
|
||||
return latestRequest;
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((api as any).request()).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should call \'https.request()\' with the correct options', () => {
|
||||
(api as any).request('method', 'path');
|
||||
|
||||
expect(httpsRequestSpy).toHaveBeenCalled();
|
||||
expect(httpsRequestSpy.calls.argsFor(0)[0]).toEqual(jasmine.objectContaining({
|
||||
headers: jasmine.objectContaining({
|
||||
'User-Agent': `Node/${process.versions.node}`,
|
||||
}),
|
||||
host: 'api.github.com',
|
||||
method: 'method',
|
||||
path: 'path',
|
||||
}));
|
||||
});
|
||||
|
||||
|
||||
it('should call specify an \'Authorization\' header if \'githubToken\' is present', () => {
|
||||
(api as any).request('method', 'path');
|
||||
|
||||
expect(httpsRequestSpy).toHaveBeenCalled();
|
||||
expect(httpsRequestSpy.calls.argsFor(0)[0].headers).toEqual(jasmine.objectContaining({
|
||||
Authorization: 'token 12345',
|
||||
}));
|
||||
});
|
||||
|
||||
|
||||
it('should reject on request error', done => {
|
||||
(api as any).request('method', 'path').catch((err: any) => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
latestRequest.emit('error', 'Test');
|
||||
});
|
||||
|
||||
|
||||
it('should send the request (i.e. call \'end()\')', () => {
|
||||
(api as any).request('method', 'path');
|
||||
expect(latestRequest.end).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should \'JSON.stringify\' and send the data along with the request', () => {
|
||||
(api as any).request('method', 'path');
|
||||
expect(latestRequest.end).toHaveBeenCalledWith(null);
|
||||
|
||||
(api as any).request('method', 'path', {key: 'value'});
|
||||
expect(latestRequest.end).toHaveBeenCalledWith('{"key":"value"}');
|
||||
});
|
||||
|
||||
|
||||
describe('onResponse', () => {
|
||||
let promise: Promise<void>;
|
||||
let respond: (statusCode: number) => IncomingMessage;
|
||||
|
||||
beforeEach(() => {
|
||||
promise = (api as any).request('method', 'path');
|
||||
|
||||
respond = (statusCode: number) => {
|
||||
const mockResponse = new EventEmitter() as IncomingMessage;
|
||||
mockResponse.statusCode = statusCode;
|
||||
|
||||
const onResponse = httpsRequestSpy.calls.argsFor(0)[1];
|
||||
onResponse(mockResponse);
|
||||
|
||||
return mockResponse;
|
||||
};
|
||||
});
|
||||
|
||||
|
||||
it('should reject on response error', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(200);
|
||||
res.emit('error', 'Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if returned statusCode is <200', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toContain('failed');
|
||||
expect(err).toContain('status: 199');
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(199);
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if returned statusCode is >=400', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toContain('failed');
|
||||
expect(err).toContain('status: 400');
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(400);
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should include the response text in the rejection message', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toContain('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(500);
|
||||
res.emit('data', 'Test');
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve if returned statusCode is <=200 <400', done => {
|
||||
promise.then(done);
|
||||
|
||||
const res = respond(200);
|
||||
res.emit('data', '{}');
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the response text \'JSON.parsed\'', done => {
|
||||
promise.then(data => {
|
||||
expect(data).toEqual({foo: 'bar'});
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(300);
|
||||
res.emit('data', '{"foo":"bar"}');
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should collect and concatenate the whole response text', done => {
|
||||
promise.then(data => {
|
||||
expect(data).toEqual({foo: 'bar', baz: 'qux'});
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(300);
|
||||
res.emit('data', '{"foo":');
|
||||
res.emit('data', '"bar","baz"');
|
||||
res.emit('data', ':"qux"}');
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if the response text is malformed JSON', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toEqual(jasmine.any(SyntaxError));
|
||||
done();
|
||||
});
|
||||
|
||||
const res = respond(300);
|
||||
res.emit('data', '}');
|
||||
res.emit('end');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,117 @@
|
||||
// Imports
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
|
||||
// Tests
|
||||
describe('GithubPullRequests', () => {
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'githubToken\' is missing or empty', () => {
|
||||
expect(() => new GithubPullRequests('', 'foo/bar')).
|
||||
toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'repoSlug\' is missing or empty', () => {
|
||||
expect(() => new GithubPullRequests('12345', '')).
|
||||
toThrowError('Missing or empty required parameter \'repoSlug\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('addComment()', () => {
|
||||
let prs: GithubPullRequests;
|
||||
let deferred: {resolve: Function, reject: Function};
|
||||
|
||||
beforeEach(() => {
|
||||
prs = new GithubPullRequests('12345', 'foo/bar');
|
||||
|
||||
spyOn(prs, 'post').and.callFake(() => new Promise((resolve, reject) => deferred = {resolve, reject}));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(prs.addComment(42, 'body')).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should throw if the PR number is invalid', () => {
|
||||
expect(() => prs.addComment(-1337, 'body')).toThrowError(`Invalid PR number: -1337`);
|
||||
expect(() => prs.addComment(NaN, 'body')).toThrowError(`Invalid PR number: NaN`);
|
||||
});
|
||||
|
||||
|
||||
it('should throw if the comment body is invalid or empty', () => {
|
||||
expect(() => prs.addComment(42, '')).toThrowError(`Invalid or empty comment body: `);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'post()\' with the correct pathname, params and data', () => {
|
||||
prs.addComment(42, 'body');
|
||||
|
||||
expect(prs.post).toHaveBeenCalledWith('/repos/foo/bar/issues/42/comments', null, {body: 'body'});
|
||||
});
|
||||
|
||||
|
||||
it('should reject if the request fails', done => {
|
||||
prs.addComment(42, 'body').catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
deferred.reject('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned response', done => {
|
||||
prs.addComment(42, 'body').then(data => {
|
||||
expect(data).toEqual('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
deferred.resolve('Test');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('fetchAll()', () => {
|
||||
let prs: GithubPullRequests;
|
||||
let prsGetPaginatedSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
prs = new GithubPullRequests('12345', 'foo/bar');
|
||||
prsGetPaginatedSpy = spyOn(prs as any, 'getPaginated');
|
||||
spyOn(console, 'log');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'getPaginated()\' with the correct pathname and params', () => {
|
||||
const expectedPathname = '/repos/foo/bar/pulls';
|
||||
|
||||
prs.fetchAll('all');
|
||||
prs.fetchAll('closed');
|
||||
prs.fetchAll('open');
|
||||
|
||||
expect(prsGetPaginatedSpy).toHaveBeenCalledTimes(3);
|
||||
expect(prsGetPaginatedSpy.calls.argsFor(0)).toEqual([expectedPathname, {state: 'all'}]);
|
||||
expect(prsGetPaginatedSpy.calls.argsFor(1)).toEqual([expectedPathname, {state: 'closed'}]);
|
||||
expect(prsGetPaginatedSpy.calls.argsFor(2)).toEqual([expectedPathname, {state: 'open'}]);
|
||||
});
|
||||
|
||||
|
||||
it('should default to \'all\' if no state is specified', () => {
|
||||
prs.fetchAll();
|
||||
expect(prsGetPaginatedSpy).toHaveBeenCalledWith('/repos/foo/bar/pulls', {state: 'all'});
|
||||
});
|
||||
|
||||
|
||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||
prsGetPaginatedSpy.and.returnValue('Test');
|
||||
expect(prs.fetchAll()).toBe('Test');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,232 @@
|
||||
// Imports
|
||||
import {GithubTeams} from '../../lib/common/github-teams';
|
||||
|
||||
// Tests
|
||||
describe('GithubTeams', () => {
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'githubToken\' is missing or empty', () => {
|
||||
expect(() => new GithubTeams('', 'org')).
|
||||
toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'organization\' is missing or empty', () => {
|
||||
expect(() => new GithubTeams('12345', '')).
|
||||
toThrowError('Missing or empty required parameter \'organization\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('fetchAll()', () => {
|
||||
let teams: GithubTeams;
|
||||
let teamsGetPaginatedSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
teams = new GithubTeams('12345', 'foo');
|
||||
teamsGetPaginatedSpy = spyOn(teams as any, 'getPaginated');
|
||||
});
|
||||
|
||||
|
||||
it('should call \'getPaginated()\' with the correct pathname and params', () => {
|
||||
teams.fetchAll();
|
||||
expect(teamsGetPaginatedSpy).toHaveBeenCalledWith('/orgs/foo/teams');
|
||||
});
|
||||
|
||||
|
||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||
teamsGetPaginatedSpy.and.returnValue('Test');
|
||||
expect(teams.fetchAll()).toBe('Test');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('isMemberById()', () => {
|
||||
let teams: GithubTeams;
|
||||
let teamsGetSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
teams = new GithubTeams('12345', 'foo');
|
||||
teamsGetSpy = spyOn(teams, 'get');
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(teams.isMemberById('user', [1])).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if called with an empty array', done => {
|
||||
teams.isMemberById('user', []).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(teamsGetSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'get()\' with the correct pathname', done => {
|
||||
teamsGetSpy.and.returnValue(Promise.resolve(null));
|
||||
teams.isMemberById('user', [1]).then(() => {
|
||||
expect(teamsGetSpy).toHaveBeenCalledWith('/teams/1/memberships/user');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'get()\' rejects', done => {
|
||||
teamsGetSpy.and.returnValue(Promise.reject(null));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(teamsGetSpy).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if the membership is not active', done => {
|
||||
teamsGetSpy.and.returnValue(Promise.resolve({state: 'pending'}));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(teamsGetSpy).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with true if the membership is active', done => {
|
||||
teamsGetSpy.and.returnValue(Promise.resolve({state: 'active'}));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should sequentially call \'get()\' until an active membership is found', done => {
|
||||
const trainedResponses: {[pathname: string]: Promise<{state: string}>} = {
|
||||
'/teams/1/memberships/user': Promise.resolve({state: 'pending'}),
|
||||
'/teams/2/memberships/user': Promise.reject(null),
|
||||
'/teams/3/memberships/user': Promise.resolve({state: 'active'}),
|
||||
};
|
||||
teamsGetSpy.and.callFake((pathname: string) => trainedResponses[pathname]);
|
||||
|
||||
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
|
||||
expect(isMember).toBe(true);
|
||||
|
||||
expect(teamsGetSpy).toHaveBeenCalledTimes(3);
|
||||
expect(teamsGetSpy.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(teamsGetSpy.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(teamsGetSpy.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if no active membership is found', done => {
|
||||
const trainedResponses: {[pathname: string]: Promise<{state: string}>} = {
|
||||
'/teams/1/memberships/user': Promise.resolve({state: 'pending'}),
|
||||
'/teams/2/memberships/user': Promise.reject(null),
|
||||
'/teams/3/memberships/user': Promise.resolve({state: 'not active'}),
|
||||
'/teams/4/memberships/user': Promise.reject(null),
|
||||
};
|
||||
teamsGetSpy.and.callFake((pathname: string) => trainedResponses[pathname]);
|
||||
|
||||
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
|
||||
expect(teamsGetSpy).toHaveBeenCalledTimes(4);
|
||||
expect(teamsGetSpy.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(teamsGetSpy.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(teamsGetSpy.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
expect(teamsGetSpy.calls.argsFor(3)[0]).toBe('/teams/4/memberships/user');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('isMemberBySlug()', () => {
|
||||
let teams: GithubTeams;
|
||||
let teamsFetchAllSpy: jasmine.Spy;
|
||||
let teamsIsMemberByIdSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
teams = new GithubTeams('12345', 'foo');
|
||||
|
||||
const mockResponse = Promise.resolve([{id: 1, slug: 'team1'}, {id: 2, slug: 'team2'}]);
|
||||
teamsFetchAllSpy = spyOn(teams, 'fetchAll').and.returnValue(mockResponse);
|
||||
teamsIsMemberByIdSpy = spyOn(teams, 'isMemberById');
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(teams.isMemberBySlug('user', ['team-slug'])).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should call \'fetchAll()\'', () => {
|
||||
teams.isMemberBySlug('user', ['team-slug']);
|
||||
expect(teamsFetchAllSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'fetchAll()\' rejects', done => {
|
||||
teamsFetchAllSpy.and.returnValue(Promise.reject(null));
|
||||
teams.isMemberBySlug('user', ['team-slug']).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'isMemberById()\' with the correct params if no team is found', done => {
|
||||
teams.isMemberBySlug('user', ['no-match']).then(() => {
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'isMemberById()\' with the correct params if teams are found', done => {
|
||||
const spy = teamsIsMemberByIdSpy;
|
||||
|
||||
Promise.all([
|
||||
teams.isMemberBySlug('user', ['team1']).then(() => expect(spy).toHaveBeenCalledWith('user', [1])),
|
||||
teams.isMemberBySlug('user', ['team2']).then(() => expect(spy).toHaveBeenCalledWith('user', [2])),
|
||||
teams.isMemberBySlug('user', ['team1', 'team2']).then(() => expect(spy).toHaveBeenCalledWith('user', [1, 2])),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'isMemberById()\' rejects', done => {
|
||||
teamsIsMemberByIdSpy.and.returnValue(Promise.reject(null));
|
||||
teams.isMemberBySlug('user', ['team1']).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the value \'isMemberById()\' resolves with', done => {
|
||||
teamsIsMemberByIdSpy.and.returnValues(Promise.resolve(false), Promise.resolve(true));
|
||||
|
||||
Promise.all([
|
||||
teams.isMemberBySlug('user', ['team1']).then(isMember => expect(isMember).toBe(false)),
|
||||
teams.isMemberBySlug('user', ['team1']).then(isMember => expect(isMember).toBe(true)),
|
||||
]).then(() => {
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledTimes(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,81 @@
|
||||
// Imports
|
||||
import {assertNotMissingOrEmpty, getEnvVar} from '../../lib/common/utils';
|
||||
|
||||
// Tests
|
||||
describe('utils', () => {
|
||||
|
||||
describe('assertNotMissingOrEmpty()', () => {
|
||||
|
||||
it('should throw if passed an empty value', () => {
|
||||
expect(() => assertNotMissingOrEmpty('foo', undefined)).
|
||||
toThrowError('Missing or empty required parameter \'foo\'!');
|
||||
expect(() => assertNotMissingOrEmpty('bar', null)).toThrowError('Missing or empty required parameter \'bar\'!');
|
||||
expect(() => assertNotMissingOrEmpty('baz', '')).toThrowError('Missing or empty required parameter \'baz\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should not throw if passed a non-empty value', () => {
|
||||
expect(() => assertNotMissingOrEmpty('foo', ' ')).not.toThrow();
|
||||
expect(() => assertNotMissingOrEmpty('bar', 'bar')).not.toThrow();
|
||||
expect(() => assertNotMissingOrEmpty('baz', 'b a z')).not.toThrow();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getEnvVar()', () => {
|
||||
const emptyVar = '$$test_utils_getEnvVar_empty$$';
|
||||
const nonEmptyVar = '$$test_utils_getEnvVar_nonEmpty$$';
|
||||
const undefinedVar = '$$test_utils_getEnvVar_undefined$$';
|
||||
|
||||
beforeEach(() => {
|
||||
process.env[emptyVar] = '';
|
||||
process.env[nonEmptyVar] = 'foo';
|
||||
});
|
||||
afterEach(() => {
|
||||
delete process.env[emptyVar];
|
||||
delete process.env[nonEmptyVar];
|
||||
});
|
||||
|
||||
|
||||
it('should return an environment variable', () => {
|
||||
expect(getEnvVar(nonEmptyVar)).toBe('foo');
|
||||
});
|
||||
|
||||
|
||||
it('should exit with an error if the environment variable is not defined', () => {
|
||||
const consoleErrorSpy = spyOn(console, 'error');
|
||||
const processExitSpy = spyOn(process, 'exit');
|
||||
|
||||
getEnvVar(undefinedVar);
|
||||
|
||||
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain(undefinedVar);
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
|
||||
it('should exit with an error if the environment variable is empty', () => {
|
||||
const consoleErrorSpy = spyOn(console, 'error');
|
||||
const processExitSpy = spyOn(process, 'exit');
|
||||
|
||||
getEnvVar(emptyVar);
|
||||
|
||||
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain(emptyVar);
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
|
||||
it('should return an empty string if an undefined variable is optional', () => {
|
||||
expect(getEnvVar(undefinedVar, true)).toBe('');
|
||||
});
|
||||
|
||||
|
||||
it('should return an empty string if an empty variable is optional', () => {
|
||||
expect(getEnvVar(emptyVar, true)).toBe('');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,6 @@
|
||||
declare namespace jasmine {
|
||||
export interface DoneFn extends Function {
|
||||
(): void;
|
||||
fail: (message: Error | string) => void;
|
||||
}
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
// Imports
|
||||
import {runTests} from '../lib/common/run-tests';
|
||||
|
||||
// Run
|
||||
const specFiles = [`${__dirname}/**/*.spec.js`];
|
||||
const helpers = [`${__dirname}/helpers.js`];
|
||||
runTests(specFiles, helpers);
|
@ -0,0 +1,320 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import {EventEmitter} from 'events';
|
||||
import * as fs from 'fs';
|
||||
import * as shell from 'shelljs';
|
||||
import {BuildCreator} from '../../lib/upload-server/build-creator';
|
||||
import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
|
||||
import {UploadError} from '../../lib/upload-server/upload-error';
|
||||
import {expectToBeUploadError} from './helpers';
|
||||
|
||||
// Tests
|
||||
describe('BuildCreator', () => {
|
||||
const pr = '9';
|
||||
const sha = '9'.repeat(40);
|
||||
const archive = 'snapshot.tar.gz';
|
||||
const buildsDir = 'builds/dir';
|
||||
const prDir = `${buildsDir}/${pr}`;
|
||||
const shaDir = `${prDir}/${sha}`;
|
||||
let bc: BuildCreator;
|
||||
|
||||
beforeEach(() => bc = new BuildCreator(buildsDir));
|
||||
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'buildsDir\' is missing or empty', () => {
|
||||
expect(() => new BuildCreator('')).toThrowError('Missing or empty required parameter \'buildsDir\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should extend EventEmitter', () => {
|
||||
expect(bc).toEqual(jasmine.any(BuildCreator));
|
||||
expect(bc).toEqual(jasmine.any(EventEmitter));
|
||||
|
||||
expect(Object.getPrototypeOf(bc)).toBe(BuildCreator.prototype);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('create()', () => {
|
||||
let bcEmitSpy: jasmine.Spy;
|
||||
let bcExistsSpy: jasmine.Spy;
|
||||
let bcExtractArchiveSpy: jasmine.Spy;
|
||||
let shellMkdirSpy: jasmine.Spy;
|
||||
let shellRmSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
bcEmitSpy = spyOn(bc, 'emit');
|
||||
bcExistsSpy = spyOn(bc as any, 'exists');
|
||||
bcExtractArchiveSpy = spyOn(bc as any, 'extractArchive');
|
||||
shellMkdirSpy = spyOn(shell, 'mkdir');
|
||||
shellRmSpy = spyOn(shell, 'rm');
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', done => {
|
||||
const promise = bc.create(pr, sha, archive);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `extractArchive()`.
|
||||
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should throw if the build does already exist', done => {
|
||||
bcExistsSpy.and.returnValue(true);
|
||||
bc.create(pr, sha, archive).catch(err => {
|
||||
expectToBeUploadError(err, 403, `Request to overwrite existing directory: ${shaDir}`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should create the build directory (and any parent directories)', done => {
|
||||
bc.create(pr, sha, archive).
|
||||
then(() => expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should extract the archive contents into the build directory', done => {
|
||||
bc.create(pr, sha, archive).
|
||||
then(() => expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should emit a CreatedBuildEvent on success', done => {
|
||||
let emitted = false;
|
||||
|
||||
bcEmitSpy.and.callFake((type: string, evt: CreatedBuildEvent) => {
|
||||
expect(type).toBe(CreatedBuildEvent.type);
|
||||
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
|
||||
expect(evt.pr).toBe(+pr);
|
||||
expect(evt.sha).toBe(sha);
|
||||
|
||||
emitted = true;
|
||||
});
|
||||
|
||||
bc.create(pr, sha, archive).
|
||||
then(() => expect(emitted).toBe(true)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('on error', () => {
|
||||
|
||||
it('should abort and skip further operations if it fails to create the directories', done => {
|
||||
shellMkdirSpy.and.throwError('');
|
||||
bc.create(pr, sha, archive).catch(() => {
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to extract the archive', done => {
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
bc.create(pr, sha, archive).catch(() => {
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should delete the PR directory (for new PR)', done => {
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
bc.create(pr, sha, archive).catch(() => {
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should delete the SHA directory (for existing PR)', done => {
|
||||
bcExistsSpy.and.callFake((path: string) => path !== shaDir);
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
|
||||
bc.create(pr, sha, archive).catch(() => {
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should reject with an UploadError', done => {
|
||||
shellMkdirSpy.and.callFake(() => {throw 'Test'; });
|
||||
bc.create(pr, sha, archive).catch(err => {
|
||||
expectToBeUploadError(err, 500, `Error while uploading to directory: ${shaDir}\nTest`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should pass UploadError instances unmodified', done => {
|
||||
shellMkdirSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
|
||||
bc.create(pr, sha, archive).catch(err => {
|
||||
expectToBeUploadError(err, 543, 'Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
// Protected methods
|
||||
|
||||
describe('exists()', () => {
|
||||
let fsAccessSpy: jasmine.Spy;
|
||||
let fsAccessCbs: Function[];
|
||||
|
||||
beforeEach(() => {
|
||||
fsAccessCbs = [];
|
||||
fsAccessSpy = spyOn(fs, 'access').and.callFake((_: string, cb: Function) => fsAccessCbs.push(cb));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((bc as any).exists('foo')).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should call \'fs.access()\' with the specified argument', () => {
|
||||
(bc as any).exists('foo');
|
||||
expect(fs.access).toHaveBeenCalledWith('foo', jasmine.any(Function));
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with \'true\' if \'fs.access()\' succeeds', done => {
|
||||
Promise.
|
||||
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
|
||||
then(results => expect(results).toEqual([true, true])).
|
||||
then(done);
|
||||
|
||||
fsAccessCbs[0]();
|
||||
fsAccessCbs[1](null);
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with \'false\' if \'fs.access()\' errors', done => {
|
||||
Promise.
|
||||
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
|
||||
then(results => expect(results).toEqual([false, false])).
|
||||
then(done);
|
||||
|
||||
fsAccessCbs[0]('Error');
|
||||
fsAccessCbs[1](new Error());
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('extractArchive()', () => {
|
||||
let consoleWarnSpy: jasmine.Spy;
|
||||
let shellChmodSpy: jasmine.Spy;
|
||||
let shellRmSpy: jasmine.Spy;
|
||||
let cpExecSpy: jasmine.Spy;
|
||||
let cpExecCbs: Function[];
|
||||
|
||||
beforeEach(() => {
|
||||
cpExecCbs = [];
|
||||
|
||||
consoleWarnSpy = spyOn(console, 'warn');
|
||||
shellChmodSpy = spyOn(shell, 'chmod');
|
||||
shellRmSpy = spyOn(shell, 'rm');
|
||||
cpExecSpy = spyOn(cp, 'exec').and.callFake((_: string, cb: Function) => cpExecCbs.push(cb));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((bc as any).extractArchive('foo', 'bar')).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should "gunzip" and "untar" the input file into the output directory', () => {
|
||||
const cmd = 'tar --extract --gzip --directory "output/dir" --file "input/file"';
|
||||
|
||||
(bc as any).extractArchive('input/file', 'output/dir');
|
||||
expect(cpExecSpy).toHaveBeenCalledWith(cmd, jasmine.any(Function));
|
||||
});
|
||||
|
||||
|
||||
it('should log (as a warning) any stderr output if extracting succeeded', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').
|
||||
then(() => expect(consoleWarnSpy).toHaveBeenCalledWith('This is stderr')).
|
||||
then(done);
|
||||
|
||||
cpExecCbs[0](null, 'This is stdout', 'This is stderr');
|
||||
});
|
||||
|
||||
|
||||
it('should make the build directory non-writable', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').
|
||||
then(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a-w', 'bar')).
|
||||
then(done);
|
||||
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
|
||||
it('should delete the uploaded file on success', done => {
|
||||
(bc as any).extractArchive('input/file', 'output/dir').
|
||||
then(() => expect(shellRmSpy).toHaveBeenCalledWith('-f', 'input/file')).
|
||||
then(done);
|
||||
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
|
||||
describe('on error', () => {
|
||||
|
||||
it('should abort and skip further operations if it fails to extract the archive', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).not.toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
cpExecCbs[0]('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to make non-writable', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
shellChmodSpy.and.callFake(() => { throw 'Test'; });
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
|
||||
it('should abort and reject if it fails to remove the uploaded file', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
shellRmSpy.and.callFake(() => { throw 'Test'; });
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,61 @@
|
||||
// Imports
|
||||
import {BuildEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
|
||||
|
||||
// Tests
|
||||
describe('BuildEvent', () => {
|
||||
let evt: BuildEvent;
|
||||
|
||||
beforeEach(() => evt = new BuildEvent('foo', 42, 'bar'));
|
||||
|
||||
|
||||
it('should have a \'type\' property', () => {
|
||||
expect(evt.type).toBe('foo');
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'pr\' property', () => {
|
||||
expect(evt.pr).toBe(42);
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'sha\' property', () => {
|
||||
expect(evt.sha).toBe('bar');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('CreatedBuildEvent', () => {
|
||||
let evt: CreatedBuildEvent;
|
||||
|
||||
beforeEach(() => evt = new CreatedBuildEvent(42, 'bar'));
|
||||
|
||||
|
||||
it('should have a static \'type\' property', () => {
|
||||
expect(CreatedBuildEvent.type).toBe('build.created');
|
||||
});
|
||||
|
||||
|
||||
it('should extend BuildEvent', () => {
|
||||
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
|
||||
expect(evt).toEqual(jasmine.any(BuildEvent));
|
||||
|
||||
expect(Object.getPrototypeOf(evt)).toBe(CreatedBuildEvent.prototype);
|
||||
});
|
||||
|
||||
|
||||
it('should automatically set the \'type\'', () => {
|
||||
expect(evt.type).toBe(CreatedBuildEvent.type);
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'pr\' property', () => {
|
||||
expect(evt.pr).toBe(42);
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'sha\' property', () => {
|
||||
expect(evt.sha).toBe('bar');
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,261 @@
|
||||
// Imports
|
||||
import * as jwt from 'jsonwebtoken';
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
import {GithubTeams} from '../../lib/common/github-teams';
|
||||
import {BuildVerifier} from '../../lib/upload-server/build-verifier';
|
||||
import {expectToBeUploadError} from './helpers';
|
||||
|
||||
// Tests
|
||||
describe('BuildVerifier', () => {
|
||||
const defaultConfig = {
|
||||
allowedTeamSlugs: ['team1', 'team2'],
|
||||
githubToken: 'githubToken',
|
||||
organization: 'organization',
|
||||
repoSlug: 'repo/slug',
|
||||
secret: 'secret',
|
||||
};
|
||||
let bv: BuildVerifier;
|
||||
|
||||
// Helpers
|
||||
const createBuildVerifier = (partialConfig: Partial<typeof defaultConfig> = {}) => {
|
||||
const cfg = {...defaultConfig, ...partialConfig};
|
||||
return new BuildVerifier(cfg.secret, cfg.githubToken, cfg.repoSlug, cfg.organization,
|
||||
cfg.allowedTeamSlugs);
|
||||
};
|
||||
|
||||
beforeEach(() => bv = createBuildVerifier());
|
||||
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
['secret', 'githubToken', 'repoSlug', 'organization', 'allowedTeamSlugs'].forEach(param => {
|
||||
it(`should throw if '${param}' is missing or empty`, () => {
|
||||
expect(() => createBuildVerifier({[param]: ''})).
|
||||
toThrowError(`Missing or empty required parameter '${param}'!`);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'allowedTeamSlugs\' is an empty array', () => {
|
||||
expect(() => createBuildVerifier({allowedTeamSlugs: []})).
|
||||
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('verify()', () => {
|
||||
const pr = 9;
|
||||
const defaultJwt = {
|
||||
'exp': Math.floor(Date.now() / 1000) + 30,
|
||||
'iat': Math.floor(Date.now() / 1000) - 30,
|
||||
'iss': 'Travis CI, GmbH',
|
||||
'pull-request': pr,
|
||||
'slug': defaultConfig.repoSlug,
|
||||
};
|
||||
let bvGetPrAuthorTeamMembership: jasmine.Spy;
|
||||
|
||||
// Heleprs
|
||||
const createAuthHeader = (partialJwt: Partial<typeof defaultJwt> = {}, secret: string = defaultConfig.secret) =>
|
||||
`Token ${jwt.sign({...defaultJwt, ...partialJwt}, secret)}`;
|
||||
|
||||
beforeEach(() => {
|
||||
bvGetPrAuthorTeamMembership = spyOn(bv, 'getPrAuthorTeamMembership').
|
||||
and.returnValue(Promise.resolve({author: 'some-author', isMember: true}));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', done => {
|
||||
const promise = bv.verify(pr, createAuthHeader());
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `bvGetPrAuthorTeamMembership()`.
|
||||
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the authorization header is invalid', done => {
|
||||
bv.verify(pr, 'foo').catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: jwt malformed';
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the secret is invalid', done => {
|
||||
bv.verify(pr, createAuthHeader({}, 'foo')).catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: invalid signature';
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the issuer is invalid', done => {
|
||||
bv.verify(pr, createAuthHeader({iss: 'not valid'})).catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: ' +
|
||||
`jwt issuer invalid. expected: ${defaultJwt.iss}`;
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the token has expired', done => {
|
||||
bv.verify(pr, createAuthHeader({exp: 0})).catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: jwt expired';
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the repo slug does not match', done => {
|
||||
bv.verify(pr, createAuthHeader({slug: 'foo/bar'})).catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: ' +
|
||||
`jwt slug invalid. expected: ${defaultConfig.repoSlug}`;
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if the PR does not match', done => {
|
||||
bv.verify(pr, createAuthHeader({'pull-request': 1337})).catch(err => {
|
||||
const errorMessage = 'Error while verifying upload for PR 9: ' +
|
||||
`jwt pull-request invalid. expected: ${pr}`;
|
||||
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should not fail if the token is valid', done => {
|
||||
bv.verify(pr, createAuthHeader()).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not fail even if the token has been issued in the future', done => {
|
||||
const in30s = Math.floor(Date.now() / 1000) + 30;
|
||||
bv.verify(pr, createAuthHeader({iat: in30s})).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'getPrAuthorTeamMembership()\' if the token is valid', done => {
|
||||
bv.verify(pr, createAuthHeader()).then(() => {
|
||||
expect(bvGetPrAuthorTeamMembership).toHaveBeenCalledWith(pr);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if \'getPrAuthorTeamMembership()\' rejects', done => {
|
||||
bvGetPrAuthorTeamMembership.and.callFake(() => Promise.reject('Test'));
|
||||
bv.verify(pr, createAuthHeader()).catch(err => {
|
||||
expectToBeUploadError(err, 403, `Error while verifying upload for PR ${pr}: Test`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if \'getPrAuthorTeamMembership()\' reports no membership', done => {
|
||||
const errorMessage = `Error while verifying upload for PR ${pr}: User 'test' is not an active member of any of ` +
|
||||
'the following teams: team1, team2';
|
||||
|
||||
bvGetPrAuthorTeamMembership.and.returnValue(Promise.resolve({author: 'test', isMember: false}));
|
||||
bv.verify(pr, createAuthHeader()).catch(err => {
|
||||
expectToBeUploadError(err, 403, errorMessage);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should succeed if everything checks outs', done => {
|
||||
bv.verify(pr, createAuthHeader()).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getPrAuthorTeamMembership()', () => {
|
||||
const pr = 9;
|
||||
let prsFetchSpy: jasmine.Spy;
|
||||
let teamsIsMemberBySlugSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
|
||||
and.returnValue(Promise.resolve({user: {login: 'username'}}));
|
||||
|
||||
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
|
||||
and.returnValue(Promise.resolve(true));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', done => {
|
||||
const promise = bv.getPrAuthorTeamMembership(pr);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
|
||||
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should fetch the corresponding PR', done => {
|
||||
bv.getPrAuthorTeamMembership(pr).then(() => {
|
||||
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if fetching the PR errors', done => {
|
||||
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
|
||||
bv.getPrAuthorTeamMembership(pr).catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should verify the PR author\'s membership in the specified teams', done => {
|
||||
bv.getPrAuthorTeamMembership(pr).then(() => {
|
||||
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if verifying membership errors', done => {
|
||||
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
|
||||
bv.getPrAuthorTeamMembership(pr).catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should return the PR\'s author and whether they are members', done => {
|
||||
teamsIsMemberBySlugSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
||||
|
||||
Promise.all([
|
||||
bv.getPrAuthorTeamMembership(pr).then(({author, isMember}) => {
|
||||
expect(author).toBe('username');
|
||||
expect(isMember).toBe(true);
|
||||
}),
|
||||
bv.getPrAuthorTeamMembership(pr).then(({author, isMember}) => {
|
||||
expect(author).toBe('username');
|
||||
expect(isMember).toBe(false);
|
||||
}),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
@ -0,0 +1,11 @@
|
||||
import {UploadError} from '../../lib/upload-server/upload-error';
|
||||
|
||||
export const expectToBeUploadError = (actual: UploadError, status?: number, message?: string) => {
|
||||
expect(actual).toEqual(jasmine.any(UploadError));
|
||||
if (status != null) {
|
||||
expect(actual.status).toBe(status);
|
||||
}
|
||||
if (message != null) {
|
||||
expect(actual.message).toBe(message);
|
||||
}
|
||||
};
|
@ -0,0 +1,39 @@
|
||||
// Imports
|
||||
import {UploadError} from '../../lib/upload-server/upload-error';
|
||||
|
||||
// Tests
|
||||
describe('UploadError', () => {
|
||||
let err: UploadError;
|
||||
|
||||
beforeEach(() => err = new UploadError(999, 'message'));
|
||||
|
||||
|
||||
it('should extend Error', () => {
|
||||
expect(err).toEqual(jasmine.any(UploadError));
|
||||
expect(err).toEqual(jasmine.any(Error));
|
||||
|
||||
expect(Object.getPrototypeOf(err)).toBe(UploadError.prototype);
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'status\' property', () => {
|
||||
expect(err.status).toBe(999);
|
||||
});
|
||||
|
||||
|
||||
it('should have a \'message\' property', () => {
|
||||
expect(err.message).toBe('message');
|
||||
});
|
||||
|
||||
|
||||
it('should have a 500 \'status\' by default', () => {
|
||||
expect(new UploadError().status).toBe(500);
|
||||
});
|
||||
|
||||
|
||||
it('should have an empty \'message\' by default', () => {
|
||||
expect(new UploadError().message).toBe('');
|
||||
expect(new UploadError(999).message).toBe('');
|
||||
});
|
||||
|
||||
});
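For reference, a minimal `UploadError` implementation consistent with the spec above would look like the sketch below (the actual `lib/upload-server/upload-error.ts` is not part of this excerpt, so treat this as illustrative rather than the shipped code):

```
// Illustrative sketch only; consistent with the spec above, but not necessarily
// identical to the actual `lib/upload-server/upload-error.ts`.
export class UploadError extends Error {
  constructor(public status: number = 500, message: string = '') {
    super(message);
    // Restore the prototype chain, so prototype checks work when targeting ES5.
    Object.setPrototypeOf(this, UploadError.prototype);
  }
}
```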
|
@ -0,0 +1,403 @@
|
||||
// Imports
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import * as supertest from 'supertest';
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
import {BuildCreator} from '../../lib/upload-server/build-creator';
|
||||
import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
|
||||
import {BuildVerifier} from '../../lib/upload-server/build-verifier';
|
||||
import {uploadServerFactory as usf} from '../../lib/upload-server/upload-server-factory';
|
||||
|
||||
// Tests
|
||||
describe('uploadServerFactory', () => {
|
||||
const defaultConfig = {
|
||||
buildsDir: 'builds/dir',
|
||||
domainName: 'domain.name',
|
||||
githubOrganization: 'organization',
|
||||
githubTeamSlugs: ['team1', 'team2'],
|
||||
githubToken: '12345',
|
||||
repoSlug: 'repo/slug',
|
||||
secret: 'secret',
|
||||
};
|
||||
|
||||
// Helpers
|
||||
const createUploadServer = (partialConfig: Partial<typeof defaultConfig> = {}) =>
|
||||
usf.create({...defaultConfig, ...partialConfig});
|
||||
|
||||
|
||||
describe('create()', () => {
|
||||
let usfCreateMiddlewareSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
usfCreateMiddlewareSpy = spyOn(usf as any, 'createMiddleware').and.callThrough();
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'buildsDir\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({buildsDir: ''})).
|
||||
toThrowError('Missing or empty required parameter \'buildsDir\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'domainName\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({domainName: ''})).
|
||||
toThrowError('Missing or empty required parameter \'domainName\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubToken\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({githubToken: ''})).
|
||||
toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubOrganization\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({githubOrganization: ''})).
|
||||
toThrowError('Missing or empty required parameter \'organization\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubTeamSlugs\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({githubTeamSlugs: []})).
|
||||
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'repoSlug\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({repoSlug: ''})).
|
||||
toThrowError('Missing or empty required parameter \'repoSlug\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'secret\' is missing or empty', () => {
|
||||
expect(() => createUploadServer({secret: ''})).
|
||||
toThrowError('Missing or empty required parameter \'secret\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should return an http.Server', () => {
|
||||
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
|
||||
const server = createUploadServer();
|
||||
|
||||
expect(server).toBe(httpCreateServerSpy.calls.mostRecent().returnValue);
|
||||
});
|
||||
|
||||
|
||||
it('should create and use an appropriate BuildCreator', () => {
|
||||
const usfCreateBuildCreatorSpy = spyOn(usf as any, 'createBuildCreator').and.callThrough();
|
||||
|
||||
createUploadServer();
|
||||
const buildCreator: BuildCreator = usfCreateBuildCreatorSpy.calls.mostRecent().returnValue;
|
||||
|
||||
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(jasmine.any(BuildVerifier), buildCreator);
|
||||
expect(usfCreateBuildCreatorSpy).toHaveBeenCalledWith('builds/dir', '12345', 'repo/slug', 'domain.name');
|
||||
});
|
||||
|
||||
|
||||
it('should create and use an appropriate middleware', () => {
|
||||
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
|
||||
|
||||
createUploadServer();
|
||||
const middleware: express.Express = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
|
||||
const buildVerifier = jasmine.any(BuildVerifier);
|
||||
const buildCreator = jasmine.any(BuildCreator);
|
||||
|
||||
expect(httpCreateServerSpy).toHaveBeenCalledWith(middleware);
|
||||
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(buildVerifier, buildCreator);
|
||||
});
|
||||
|
||||
|
||||
it('should log the server address info on \'listening\'', () => {
|
||||
const consoleInfoSpy = spyOn(console, 'info');
|
||||
const server = createUploadServer();
|
||||
server.address = () => ({address: 'foo', family: '', port: 1337});
|
||||
|
||||
expect(consoleInfoSpy).not.toHaveBeenCalled();
|
||||
|
||||
server.emit('listening');
|
||||
expect(consoleInfoSpy).toHaveBeenCalledWith('Up and running (and listening on foo:1337)...');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
// Protected methods
|
||||
|
||||
describe('createBuildCreator()', () => {
|
||||
let buildCreator: BuildCreator;
|
||||
|
||||
beforeEach(() => {
|
||||
buildCreator = (usf as any).createBuildCreator(
|
||||
defaultConfig.buildsDir,
|
||||
defaultConfig.githubToken,
|
||||
defaultConfig.repoSlug,
|
||||
defaultConfig.domainName,
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
it('should pass the \'buildsDir\' to the BuildCreator', () => {
|
||||
expect((buildCreator as any).buildsDir).toBe('builds/dir');
|
||||
});
|
||||
|
||||
|
||||
it('should post a comment on GitHub on \'build.created\'', () => {
|
||||
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
|
||||
const commentBody = 'The angular.io preview for 1234567 is available [here][1].\n\n' +
|
||||
'[1]: https://pr42-1234567890.domain.name/';
|
||||
|
||||
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890'});
|
||||
|
||||
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
|
||||
});
|
||||
|
||||
|
||||
it('should pass the correct \'githubToken\' and \'repoSlug\' to GithubPullRequests', () => {
|
||||
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
|
||||
|
||||
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890'});
|
||||
const prs = prsAddCommentSpy.calls.mostRecent().object;
|
||||
|
||||
expect(prs).toEqual(jasmine.any(GithubPullRequests));
|
||||
expect((prs as any).repoSlug).toBe('repo/slug');
|
||||
expect((prs as any).requestHeaders.Authorization).toContain('12345');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('createMiddleware()', () => {
|
||||
let buildVerifier: BuildVerifier;
|
||||
let buildCreator: BuildCreator;
|
||||
let agent: supertest.SuperTest<supertest.Test>;
|
||||
|
||||
// Helpers
|
||||
const promisifyRequest = (req: supertest.Request) =>
|
||||
new Promise((resolve, reject) => req.end(err => err ? reject(err) : resolve()));
|
||||
const verifyRequests = (reqs: supertest.Request[], done: jasmine.DoneFn) =>
|
||||
Promise.all(reqs.map(promisifyRequest)).then(done, done.fail);
|
||||
|
||||
beforeEach(() => {
|
||||
buildVerifier = new BuildVerifier(
|
||||
defaultConfig.secret,
|
||||
defaultConfig.githubToken,
|
||||
defaultConfig.repoSlug,
|
||||
defaultConfig.githubOrganization,
|
||||
defaultConfig.githubTeamSlugs,
|
||||
);
|
||||
buildCreator = new BuildCreator(defaultConfig.buildsDir);
|
||||
agent = supertest.agent((usf as any).createMiddleware(buildVerifier, buildCreator));
|
||||
|
||||
spyOn(console, 'error');
|
||||
});
|
||||
|
||||
|
||||
describe('GET /create-build/<pr>/<sha>', () => {
|
||||
const pr = '9';
|
||||
const sha = '9'.repeat(40);
|
||||
let buildVerifierVerifySpy: jasmine.Spy;
|
||||
let buildCreatorCreateSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
buildVerifierVerifySpy = spyOn(buildVerifier, 'verify').and.returnValue(Promise.resolve());
|
||||
buildCreatorCreateSpy = spyOn(buildCreator, 'create').and.returnValue(Promise.resolve());
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 405 for non-GET requests', done => {
|
||||
verifyRequests([
|
||||
agent.put(`/create-build/${pr}/${sha}`).expect(405),
|
||||
agent.post(`/create-build/${pr}/${sha}`).expect(405),
|
||||
agent.patch(`/create-build/${pr}/${sha}`).expect(405),
|
||||
agent.delete(`/create-build/${pr}/${sha}`).expect(405),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 401 for requests without an \'AUTHORIZATION\' header', done => {
|
||||
const url = `/create-build/${pr}/${sha}`;
|
||||
const responseBody = `Missing or empty 'AUTHORIZATION' header in request: GET ${url}`;
|
||||
|
||||
verifyRequests([
|
||||
agent.get(url).expect(401, responseBody),
|
||||
agent.get(url).set('AUTHORIZATION', '').expect(401, responseBody),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without an \'X-FILE\' header', done => {
|
||||
const url = `/create-build/${pr}/${sha}`;
|
||||
const responseBody = `Missing or empty 'X-FILE' header in request: GET ${url}`;
|
||||
|
||||
const request1 = agent.get(url).set('AUTHORIZATION', 'foo');
|
||||
const request2 = agent.get(url).set('AUTHORIZATION', 'foo').set('X-FILE', '');
|
||||
|
||||
verifyRequests([
|
||||
request1.expect(400, responseBody),
|
||||
request2.expect(400, responseBody),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
verifyRequests([
|
||||
agent.get(`/foo/create-build/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/foo-create-build/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/fooncreate-build/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/create-build/foo/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/create-build-foo/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/create-buildnfoo/${pr}/${sha}`).expect(404),
|
||||
agent.get(`/create-build/pr${pr}/${sha}`).expect(404),
|
||||
agent.get(`/create-build/${pr}/${sha}42`).expect(404),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'BuildVerifier#verify()\' with the correct arguments', done => {
|
||||
const req = agent.
|
||||
get(`/create-build/${pr}/${sha}`).
|
||||
set('AUTHORIZATION', 'foo').
|
||||
set('X-FILE', 'bar');
|
||||
|
||||
promisifyRequest(req).
|
||||
then(() => expect(buildVerifierVerifySpy).toHaveBeenCalledWith(9, 'foo')).
|
||||
then(done, done.fail);
|
||||
});
|
||||
|
||||
|
||||
it('should propagate errors from BuildVerifier', done => {
|
||||
buildVerifierVerifySpy.and.callFake(() => Promise.reject('Test'));
|
||||
|
||||
const req = agent.
|
||||
get(`/create-build/${pr}/${sha}`).
|
||||
set('AUTHORIZATION', 'foo').
|
||||
set('X-FILE', 'bar').
|
||||
expect(500, 'Test');
|
||||
|
||||
promisifyRequest(req).
|
||||
then(() => {
|
||||
expect(buildVerifierVerifySpy).toHaveBeenCalledWith(9, 'foo');
|
||||
expect(buildCreatorCreateSpy).not.toHaveBeenCalled();
|
||||
}).
|
||||
then(done, done.fail);
|
||||
});
|
||||
|
||||
|
||||
it('should call \'BuildCreator#create()\' with the correct arguments', done => {
|
||||
const req = agent.
|
||||
get(`/create-build/${pr}/${sha}`).
|
||||
set('AUTHORIZATION', 'foo').
|
||||
set('X-FILE', 'bar');
|
||||
|
||||
promisifyRequest(req).
|
||||
then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar')).
|
||||
then(done, done.fail);
|
||||
});
|
||||
|
||||
|
||||
it('should propagate errors from BuildCreator', done => {
|
||||
buildCreatorCreateSpy.and.callFake(() => Promise.reject('Test'));
|
||||
const req = agent.
|
||||
get(`/create-build/${pr}/${sha}`).
|
||||
set('AUTHORIZATION', 'foo').
|
||||
set('X-FILE', 'bar').
|
||||
expect(500, 'Test');
|
||||
|
||||
verifyRequests([req], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 201 on successful upload', done => {
|
||||
const req = agent.
|
||||
get(`/create-build/${pr}/${sha}`).
|
||||
set('AUTHORIZATION', 'foo').
|
||||
set('X-FILE', 'bar').
|
||||
expect(201, http.STATUS_CODES[201]);
|
||||
|
||||
verifyRequests([req], done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
verifyRequests([agent.get(`/create-build/0${pr}/${sha}`).expect(404)], done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const sha40 = '0'.repeat(40);
|
||||
const sha41 = `0${sha40}`;
|
||||
|
||||
const request40 = agent.get(`/create-build/${pr}/${sha40}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
|
||||
const request41 = agent.get(`/create-build/${pr}/${sha41}`).set('AUTHORIZATION', 'baz').set('X-FILE', 'qux');
|
||||
|
||||
Promise.all([
|
||||
promisifyRequest(request40.expect(201)),
|
||||
promisifyRequest(request41.expect(404)),
|
||||
]).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('GET /health-check', () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
verifyRequests([
|
||||
agent.get('/health-check').expect(200),
|
||||
agent.get('/health-check/').expect(200),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 405 for non-GET requests', done => {
|
||||
verifyRequests([
|
||||
agent.put('/health-check').expect(405),
|
||||
agent.post('/health-check').expect(405),
|
||||
agent.patch('/health-check').expect(405),
|
||||
agent.delete('/health-check').expect(405),
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
verifyRequests([
|
||||
agent.get('/health-check/foo').expect(404),
|
||||
agent.get('/health-check-foo').expect(404),
|
||||
agent.get('/health-checknfoo').expect(404),
|
||||
agent.get('/foo/health-check').expect(404),
|
||||
agent.get('/foo-health-check').expect(404),
|
||||
agent.get('/foonhealth-check').expect(404),
|
||||
], done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('GET *', () => {
|
||||
|
||||
it('should respond with 404', done => {
|
||||
const responseBody = 'Unknown resource in request: GET /some/url';
|
||||
verifyRequests([agent.get('/some/url').expect(404, responseBody)], done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('ALL *', () => {
|
||||
|
||||
it('should respond with 405', done => {
|
||||
const responseFor = (method: string) => `Unsupported method in request: ${method.toUpperCase()} /some/url`;
|
||||
|
||||
verifyRequests([
|
||||
agent.put('/some/url').expect(405, responseFor('put')),
|
||||
agent.post('/some/url').expect(405, responseFor('post')),
|
||||
agent.patch('/some/url').expect(405, responseFor('patch')),
|
||||
agent.delete('/some/url').expect(405, responseFor('delete')),
|
||||
], done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
28
aio/aio-builds-setup/dockerbuild/scripts-js/tsconfig.json
Normal file
28
aio/aio-builds-setup/dockerbuild/scripts-js/tsconfig.json
Normal file
@ -0,0 +1,28 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"alwaysStrict": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"lib": [
|
||||
"es2016"
|
||||
],
|
||||
"noImplicitAny": true,
|
||||
"noImplicitReturns": true,
|
||||
"noImplicitThis": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"pretty": true,
|
||||
"rootDir": ".",
|
||||
"skipLibCheck": true,
|
||||
"strictNullChecks": true,
|
||||
"target": "es5",
|
||||
"typeRoots": [
|
||||
"node_modules/@types"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"lib/**/*",
|
||||
"test/**/*"
|
||||
]
|
||||
}
|
15
aio/aio-builds-setup/dockerbuild/scripts-js/tslint.json
Normal file
15
aio/aio-builds-setup/dockerbuild/scripts-js/tslint.json
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"extends": "tslint:recommended",
|
||||
"rules": {
|
||||
"array-type": [true, "array"],
|
||||
"arrow-parens": [true, "ban-single-arg-parens"],
|
||||
"interface-name": [true, "never-prefix"],
|
||||
"max-classes-per-file": [true, 4],
|
||||
"no-consecutive-blank-lines": [true, 2],
|
||||
"no-console": false,
|
||||
"no-namespace": [true, "allow-declarations"],
|
||||
"no-string-literal": false,
|
||||
"quotemark": [true, "single"],
|
||||
"variable-name": [true, "ban-keywords", "check-format", "allow-leading-underscore"]
|
||||
}
|
||||
}
|
2552
aio/aio-builds-setup/dockerbuild/scripts-js/yarn.lock
Normal file
2552
aio/aio-builds-setup/dockerbuild/scripts-js/yarn.lock
Normal file
File diff suppressed because it is too large
8
aio/aio-builds-setup/dockerbuild/scripts-sh/clean-up.sh
Executable file
8
aio/aio-builds-setup/dockerbuild/scripts-sh/clean-up.sh
Executable file
@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
set -e -o pipefail
|
||||
|
||||
# Set up env variables
|
||||
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null)
|
||||
|
||||
# Run the clean-up
|
||||
node $AIO_SCRIPTS_JS_DIR/dist/lib/clean-up >> /var/log/aio/clean-up.log 2>&1
|
53
aio/aio-builds-setup/dockerbuild/scripts-sh/health-check.sh
Normal file
53
aio/aio-builds-setup/dockerbuild/scripts-sh/health-check.sh
Normal file
@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
set +e -o pipefail
|
||||
|
||||
|
||||
# Variables
|
||||
exitCode=0
|
||||
|
||||
|
||||
# Helpers
|
||||
function reportStatus {
|
||||
local lastExitCode=$?
|
||||
echo "$1: $([[ $lastExitCode -eq 0 ]] && echo OK || echo NOT OK)"
|
||||
[[ $lastExitCode -eq 0 ]] || exitCode=1
|
||||
}
|
||||
|
||||
|
||||
# Check services
|
||||
services=(
|
||||
rsyslog
|
||||
cron
|
||||
nginx
|
||||
pm2-root
|
||||
)
|
||||
for s in ${services[@]}; do
|
||||
service $s status > /dev/null
|
||||
reportStatus "Service '$s'"
|
||||
done
|
||||
|
||||
|
||||
# Check servers
|
||||
origins=(
|
||||
http://$AIO_UPLOAD_HOSTNAME:$AIO_UPLOAD_PORT
|
||||
http://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTP
|
||||
https://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTPS
|
||||
)
|
||||
for o in ${origins[@]}; do
|
||||
curl --fail --silent $o/health-check > /dev/null
|
||||
reportStatus "Server '$o'"
|
||||
done
|
||||
|
||||
|
||||
# Check resolution of external URLs
|
||||
origins=(
|
||||
https://google.com
|
||||
)
|
||||
for o in ${origins[@]}; do
|
||||
curl --fail --silent $o > /dev/null
|
||||
reportStatus "External URL '$o'"
|
||||
done
|
||||
|
||||
|
||||
# Exit
|
||||
exit $exitCode
|
18
aio/aio-builds-setup/dockerbuild/scripts-sh/init.sh
Executable file
18
aio/aio-builds-setup/dockerbuild/scripts-sh/init.sh
Executable file
@ -0,0 +1,18 @@
|
||||
#!/bin/bash
|
||||
set -e -o pipefail
|
||||
|
||||
exec >> /var/log/aio/init.log
|
||||
exec 2>&1
|
||||
|
||||
# Start the services
|
||||
echo [`date`] - Starting services...
|
||||
mkdir -p $AIO_NGINX_LOGS_DIR
|
||||
mkdir -p $TEST_AIO_NGINX_LOGS_DIR
|
||||
|
||||
service rsyslog start
|
||||
service cron start
|
||||
service dnsmasq start
|
||||
service nginx start
|
||||
service pm2-root start
|
||||
aio-upload-server-prod start
|
||||
echo [`date`] - Services started successfully.
|
15
aio/aio-builds-setup/dockerbuild/scripts-sh/upload-server-prod.sh
Executable file
15
aio/aio-builds-setup/dockerbuild/scripts-sh/upload-server-prod.sh
Executable file
@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
set -e -o pipefail
|
||||
|
||||
# Set up env variables for production
|
||||
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null)
|
||||
export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null)
|
||||
|
||||
# Start the upload-server instance
|
||||
# TODO(gkalpak): Ideally, the upload server should be run as a non-privileged user.
|
||||
# (Currently, there doesn't seem to be a straightforward way.)
|
||||
action=$([ "$1" == "stop" ] && echo "stop" || echo "start")
|
||||
pm2 $action $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server \
|
||||
--log /var/log/aio/upload-server-prod.log \
|
||||
--name aio-upload-server-prod \
|
||||
${@:2}
|
@ -0,0 +1,29 @@
|
||||
#!/bin/bash
|
||||
set -e -o pipefail
|
||||
|
||||
# Set up env variables for testing
|
||||
export AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR
|
||||
export AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME
|
||||
export AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION
|
||||
export AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS
|
||||
export AIO_PREVIEW_DEPLOYMENT_TOKEN=$TEST_AIO_PREVIEW_DEPLOYMENT_TOKEN
|
||||
export AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG
|
||||
export AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME
|
||||
export AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT
|
||||
|
||||
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/TEST_GITHUB_TOKEN 2>/dev/null || echo "TEST_GITHUB_TOKEN")
|
||||
export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/TEST_PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null || echo "TEST_PREVIEW_DEPLOYMENT_TOKEN")
|
||||
|
||||
# Start the upload-server instance
|
||||
# TODO(gkalpak): Ideally, the upload server should be run as a non-privileged user.
|
||||
# (Currently, there doesn't seem to be a straightforward way.)
|
||||
appName=aio-upload-server-test
|
||||
if [[ "$1" == "stop" ]]; then
|
||||
pm2 delete $appName
|
||||
else
|
||||
pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server/index-test.js \
|
||||
--log /var/log/aio/upload-server-test.log \
|
||||
--name $appName \
|
||||
--no-autorestart \
|
||||
${@:2}
|
||||
fi
|
40
aio/aio-builds-setup/dockerbuild/scripts-sh/verify-setup.sh
Normal file
40
aio/aio-builds-setup/dockerbuild/scripts-sh/verify-setup.sh
Normal file
@ -0,0 +1,40 @@
|
||||
#!/bin/bash
|
||||
set -e -o pipefail
|
||||
|
||||
logFile=/var/log/aio/verify-setup.log
|
||||
uploadServerLogFile=/var/log/aio/upload-server-verify-setup.log
|
||||
|
||||
exec 3>&1
|
||||
exec >> $logFile
|
||||
exec 2>&1
|
||||
|
||||
echo "[`date`] - Starting verification..."
|
||||
|
||||
# Helpers
|
||||
function countdown {
|
||||
message=$1
|
||||
secs=$2
|
||||
while [ $secs -gt 0 ]; do
|
||||
echo -ne "$message in $secs...\033[0K\r"
|
||||
sleep 1
|
||||
: $((secs--))
|
||||
done
|
||||
echo -ne "\033[0K\r"
|
||||
}
|
||||
|
||||
function onExit {
|
||||
aio-upload-server-test stop
|
||||
echo -e "Full logs in '$logFile'.\n" > /dev/fd/3
|
||||
}
|
||||
|
||||
# Setup EXIT trap
|
||||
trap 'onExit' EXIT
|
||||
|
||||
# Start an upload-server instance for testing
|
||||
aio-upload-server-test start --log $uploadServerLogFile
|
||||
|
||||
# Give the upload-server some time to start :(
|
||||
countdown "Starting" 5 > /dev/fd/3
|
||||
|
||||
# Run the tests
|
||||
node $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup | tee /dev/fd/3
|
28
aio/aio-builds-setup/docs/_TOC.md
Normal file
28
aio/aio-builds-setup/docs/_TOC.md
Normal file
@ -0,0 +1,28 @@
|
||||
# VM Setup Instructions
|
||||
|
||||
|
||||
## Overview
|
||||
- [General overview](overview--general.md)
|
||||
- [Security model](overview--security-model.md)
|
||||
- [Available Commands](overview--scripts-and-commands.md)
|
||||
|
||||
|
||||
## Setting up the VM
|
||||
- [Set up secrets](vm-setup--set-up-secrets.md)
|
||||
- [Set up docker](vm-setup--set-up-docker.md)
|
||||
- [Attach persistent disk](vm-setup--attach-persistent-disk.md)
|
||||
- [Create host directories and files](vm-setup--create-host-dirs-and-files.md)
|
||||
- [Create docker image](vm-setup--create-docker-image.md)
|
||||
|
||||
|
||||
## Configuring the docker image
|
||||
- [Available environment variables](image-config--environment-variables.md)
|
||||
|
||||
|
||||
## Starting the docker container
|
||||
- [Start docker container](vm-setup--start-docker-container.md)
|
||||
|
||||
|
||||
## Miscellaneous
|
||||
- [Debug docker container](misc--debug-docker-container.md)
|
||||
- [Integrate with CI](misc--integrate-with-ci.md)
|
@ -0,0 +1,52 @@
|
||||
# Image config - Environment variables
|
||||
|
||||
|
||||
Below is a list of environment variables that can be configured when creating the docker image (as
|
||||
described [here](vm-setup--create-docker-image.md)). An up-to-date list of the configurable
|
||||
environment variables and their default values can be found in the
|
||||
[Dockerfile](../dockerbuild/Dockerfile).
|
||||
|
||||
**Note:**
|
||||
Each variable has a `TEST_` prefixed counterpart, which is used for testing purposes. In most cases
|
||||
you don't need to specify values for those.
|
||||
|
||||
- `AIO_BUILDS_DIR`:
|
||||
The directory (inside the container) where the uploaded build artifacts are kept.
|
||||
|
||||
- `AIO_DOMAIN_NAME`:
|
||||
The domain name of the server.
|
||||
|
||||
- `AIO_GITHUB_ORGANIZATION`:
|
||||
The GitHub organization whose teams are whitelisted for accepting uploads.
|
||||
See also `AIO_GITHUB_TEAM_SLUGS`.
|
||||
|
||||
- `AIO_GITHUB_TEAM_SLUGS`:
|
||||
A comma-separated list of teams whose members are allowed to upload PR build artifacts.
|
||||
See also `AIO_GITHUB_ORGANIZATION`.
|
||||
|
||||
- `AIO_NGINX_HOSTNAME`:
|
||||
The internal hostname for accessing the nginx server. This is mostly used for performing a
|
||||
periodic health-check.
|
||||
|
||||
- `AIO_NGINX_PORT_HTTP`:
|
||||
The port number on which nginx listens for HTTP connections. This should be mapped to the
|
||||
corresponding port on the host VM (as described [here](vm-setup--start-docker-container.md)).
|
||||
|
||||
- `AIO_NGINX_PORT_HTTPS`:
|
||||
The port number on which nginx listens for HTTPS connections. This should be mapped to the
|
||||
corresponding port on the host VM (as described [here](vm-setup--start-docker-container.md)).
|
||||
|
||||
- `AIO_REPO_SLUG`:
|
||||
The repository slug (in the form `<user>/<repo>`) for which PRs will be uploaded.
|
||||
|
||||
- `AIO_UPLOAD_HOSTNAME`:
|
||||
The internal hostname for accessing the Node.js upload-server. This is used by nginx for
|
||||
delegating upload requests and also for performing a periodic health-check.
|
||||
|
||||
- `AIO_UPLOAD_MAX_SIZE`:
|
||||
The maximum allowed size for the uploaded gzip archive containing the build artifacts. Files
|
||||
larger than this will be rejected.
|
||||
|
||||
- `AIO_UPLOAD_PORT`:
|
||||
The port number on which the Node.js upload-server listens for HTTP connections. This is used by
|
||||
nginx for delegating upload requests and also for performing a periodic health-check.
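For reference, the upload-server spec earlier in this diff shows that `uploadServerFactory.create()` expects a config object with `buildsDir`, `domainName`, `githubOrganization`, `githubTeamSlugs`, `githubToken`, `repoSlug` and `secret` fields. Below is a minimal sketch of how the variables above (plus the secrets exported by the `aio-upload-server-*` wrapper scripts) might be mapped onto that object; the exact mapping performed by the server's entry point is not part of this excerpt, so the helper is illustrative only:

```
// Illustrative only: maps AIO_* environment variables onto the config shape that
// `uploadServerFactory.create()` is shown to accept in the spec. The real entry
// point may be structured differently.
const getEnvVar = (name: string): string => {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Missing or empty required environment variable '${name}'!`);
  }
  return value;
};

const config = {
  buildsDir: getEnvVar('AIO_BUILDS_DIR'),
  domainName: getEnvVar('AIO_DOMAIN_NAME'),
  githubOrganization: getEnvVar('AIO_GITHUB_ORGANIZATION'),
  githubTeamSlugs: getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(','),
  githubToken: getEnvVar('AIO_GITHUB_TOKEN'),              // exported from /aio-secrets/GITHUB_TOKEN
  repoSlug: getEnvVar('AIO_REPO_SLUG'),
  secret: getEnvVar('AIO_PREVIEW_DEPLOYMENT_TOKEN'),       // exported from /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN
};
```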
|
12
aio/aio-builds-setup/docs/misc--debug-docker-container.md
Normal file
12
aio/aio-builds-setup/docs/misc--debug-docker-container.md
Normal file
@ -0,0 +1,12 @@
|
||||
# Miscellaneous - Debug docker container
|
||||
|
||||
|
||||
TODO (gkalpak): Add docs. Mention:
|
||||
- `aio-health-check`
|
||||
- `aio-verify-setup`
|
||||
- Test nginx accessible at:
|
||||
- `http://$TEST_AIO_NGINX_HOSTNAME:$TEST_AIO_NGINX_PORT_HTTP`
|
||||
- `https://$TEST_AIO_NGINX_HOSTNAME:$TEST_AIO_NGINX_PORT_HTTPS`
|
||||
- Test upload-server accessible at:
|
||||
- `http://$TEST_AIO_UPLOAD_HOSTNAME:$TEST_AIO_UPLOAD_PORT`
|
||||
- Local DNS (via dnsmasq) maps the above hostnames to 127.0.0.1
|
12
aio/aio-builds-setup/docs/misc--integrate-with-ci.md
Normal file
12
aio/aio-builds-setup/docs/misc--integrate-with-ci.md
Normal file
@ -0,0 +1,12 @@
|
||||
# Miscellaneous - Integrate with CI
|
||||
|
||||
|
||||
TODO (gkalpak): Add docs. Mention:
|
||||
- Travis' JWT addon (+ limitations).
|
||||
Relevant files: `.travis.yml`
|
||||
- Testing on CI.
|
||||
Relevant files: `ci/test-aio.sh`, `aio/aio-builds-setup/scripts/test.sh`
|
||||
- Preverifying on CI.
|
||||
Relevant files: `ci/deploy.sh`, `aio/aio-builds-setup/scripts/travis-preverify-pr.sh`
|
||||
- Deploying from CI.
|
||||
Relevant files: `ci/deploy.sh`, `aio/scripts/deploy-preview.sh`
|
84
aio/aio-builds-setup/docs/overview--general.md
Normal file
84
aio/aio-builds-setup/docs/overview--general.md
Normal file
@ -0,0 +1,84 @@
|
||||
# Overview - General
|
||||
|
||||
|
||||
## Objective
|
||||
Whenever a PR job is run on Travis, we want to build `angular.io` and upload the build artifacts to
|
||||
a publicly accessible server so that collaborators (developers, designers, authors, etc) can preview
|
||||
the changes without having to check out and build the app locally.
|
||||
|
||||
|
||||
## Source code
|
||||
In order to make it easier to administer the server and version-control the setup, we are using
|
||||
[docker](https://www.docker.com) to run a container on a VM. The Dockerfile and all other files
|
||||
necessary for creating the docker container are stored (and versioned) along with the angular.io
|
||||
project's source code (currently part of the angular/angular repo) in the `aio-builds-setup/`
|
||||
directory.
|
||||
|
||||
|
||||
## Setup
|
||||
The VM is hosted on [Google Compute Engine](https://cloud.google.com/compute/). The host OS is
|
||||
debian:jessie. For more info on how to set up the host VM, take a look at the "Setting up the VM"
|
||||
section in [TOC](_TOC.md).
|
||||
|
||||
|
||||
## Security model
|
||||
Since we are managing a public server, it is important to take appropriate measures in order to
|
||||
prevent abuse. For more details on the challenges and the chosen approach take a look at the
|
||||
[security model](overview--security-model.md).
|
||||
|
||||
|
||||
## The 10,000-foot view
|
||||
This section gives a brief summary of the several operations performed on CI and by the docker
|
||||
container:
|
||||
|
||||
|
||||
### On CI (Travis)
|
||||
- Build job completes successfully (i.e. build succeeds and tests pass).
|
||||
- The CI script checks whether the build job was initiated by a PR against the angular/angular
|
||||
master branch.
|
||||
- The CI script checks whether the PR has touched any files inside the angular.io project directory
|
||||
(currently `aio/`).
|
||||
- The CI script checks whether the author of the PR is a member of one of the whitelisted GitHub
|
||||
teams (and therefore allowed to upload).
|
||||
**Note:**
|
||||
For security reasons, the same checks will be performed on the server as well. This is an optional
|
||||
step with the purpose of:
|
||||
1. Avoiding the wasted overhead associated with uploads that are going to be rejected (e.g.
|
||||
building the artifacts, sending them to the server, running checks on the server, etc).
|
||||
2. Avoiding failing the build (due to an error response from the server) or requiring additional
|
||||
logic for detecting the reasons of the failure.
|
||||
- The CI script gzips and uploads the build artifacts to the server.
|
||||
|
||||
More info on how to set things up on CI can be found [here](misc--integrate-with-ci.md).
|
||||
|
||||
|
||||
### Uploading build artifacts
|
||||
- nginx receives upload request.
|
||||
- nginx checks that the uploaded gzip archive does not exceed the specified max file size, stores it
|
||||
in a temporary location and passes the filepath to the Node.js upload-server.
|
||||
- The upload-server verifies that the uploaded file is not trying to overwrite an existing build,
|
||||
and runs several checks to determine whether the request should be accepted (more details can be
|
||||
found [here](overview--security-model.md)).
|
||||
- The upload-server deploys the artifacts to a sub-directory named after the PR number and SHA:
|
||||
`<PR>/<SHA>/`
|
||||
- The upload-server posts a comment on the corresponding PR on GitHub mentioning the SHA and the
|
||||
link where the preview can be found.
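For reference, the upload-server spec earlier in this diff pins down the shape of the request that nginx forwards: a `GET /create-build/<pr>/<sha>` request carrying the JWT in the `AUTHORIZATION` header and the path of the stored archive in the `X-FILE` header, answered with `201 Created` on success. A minimal sketch of such a request (hostname, port and file paths are placeholders):

```
import * as http from 'http';

// Illustrative only: the request nginx is described as forwarding to the Node.js
// upload-server. Route and header names are taken from the upload-server spec;
// hostname, port and file paths are placeholders.
const options: http.RequestOptions = {
  hostname: 'upload.localhost',   // AIO_UPLOAD_HOSTNAME
  port: 3000,                     // AIO_UPLOAD_PORT
  path: '/create-build/42/' + '0123456789'.repeat(4),
  headers: {
    'AUTHORIZATION': '<jwt-issued-by-travis>',
    'X-FILE': '/tmp/uploads/42-archive.tar.gz',   // set by nginx, not by the CI client
  },
};

http.get(options, res => console.log(`Upload server responded with ${res.statusCode}`));
```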
|
||||
|
||||
|
||||
### Serving build artifacts
|
||||
- nginx receives a request for an uploaded resource on a subdomain corresponding to the PR and SHA.
|
||||
E.g.: `pr<PR>-<SHA>.ngbuilds.io/path/to/resource`
|
||||
- nginx maps the subdomain to the correct sub-directory and serves the resource.
|
||||
E.g.: `/<PR>/<SHA>/path/to/resource`
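Conceptually, the rewrite nginx performs corresponds to the following mapping (shown in TypeScript purely for illustration; the actual implementation is an nginx configuration rule, which is not part of this excerpt):

```
// Illustrative only: the conceptual subdomain-to-directory mapping performed by nginx.
const toBuildPath = (host: string, urlPath: string): string | null => {
  const match = /^pr(\d+)-([0-9a-f]+)\./.exec(host);
  return match ? `/${match[1]}/${match[2]}${urlPath}` : null;
};

// toBuildPath('pr42-1234567890.ngbuilds.io', '/index.html') === '/42/1234567890/index.html'
```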
|
||||
|
||||
|
||||
### Removing obsolete artifacts
|
||||
In order to avoid flooding the disk with unnecessary build artifacts, there is a cronjob that runs a
|
||||
clean-up task once a day. The task retrieves all open PRs from GitHub and removes all directories
|
||||
that do not correspond to an open PR.
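A minimal sketch of that clean-up logic, assuming a hypothetical `getOpenPrNumbers()` helper that wraps the GitHub API call (the actual `clean-up` implementation is not part of this excerpt):

```
import * as fs from 'fs';
import * as path from 'path';

// Illustrative only. `getOpenPrNumbers()` is a hypothetical stand-in for the
// GitHub API call; the real clean-up script may be structured differently.
async function cleanUpObsoleteBuilds(buildsDir: string,
                                     getOpenPrNumbers: () => Promise<number[]>): Promise<void> {
  const openPrs = new Set(await getOpenPrNumbers());

  for (const entry of fs.readdirSync(buildsDir)) {
    const prNumber = parseInt(entry, 10);

    if (isNaN(prNumber) || !openPrs.has(prNumber)) {
      // The directory does not correspond to an open PR; remove it.
      fs.rmSync(path.join(buildsDir, entry), {force: true, recursive: true});
    }
  }
}
```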
|
||||
|
||||
|
||||
### Health-check
|
||||
The docker service runs a periodic health-check that verifies the running conditions of the
|
||||
container. This includes verifying the status of specific system services, the responsiveness of
|
||||
nginx and the upload-server and internet connectivity.
|
55
aio/aio-builds-setup/docs/overview--scripts-and-commands.md
Normal file
55
aio/aio-builds-setup/docs/overview--scripts-and-commands.md
Normal file
@ -0,0 +1,55 @@
|
||||
# Overview - Scripts and Commands
|
||||
|
||||
|
||||
This is an overview of the available scripts and commands.
|
||||
|
||||
|
||||
## Scripts
|
||||
The scripts are located inside `<aio-builds-setup-dir>/scripts/`. The following scripts are
|
||||
available:
|
||||
|
||||
- `build.sh`:
|
||||
Can be used for creating a preconfigured docker image.
|
||||
See [here](vm-setup--create-docker-image.md) for more info.
|
||||
|
||||
- `test.sh`
|
||||
Can be used for running the tests for `<aio-builds-setup-dir>/dockerbuild/scripts-js/`. This is
|
||||
useful for CI integration. See [here](misc--integrate-with-ci.md) for more info.
|
||||
|
||||
- `travis-preverify-pr.sh`
|
||||
Can be used for "preverifying" a PR before uploading the artifacts to the server. It checks that
|
||||
the author of the PR is a member of one of the specified GitHub teams and is therefore allowed to upload
|
||||
build artifacts. This is useful for CI integration. See [here](misc--integrate-with-ci.md) for
|
||||
more info.
|
||||
|
||||
|
||||
## Commands
|
||||
The following commands are available globally from inside the docker container. They are either used
|
||||
by the container to perform its various operations or can be used ad-hoc, mainly for testing
|
||||
purposes. Each command is backed by a corresponding script inside
|
||||
`<aio-builds-setup-dir>/dockerbuild/scripts-sh/`.
|
||||
|
||||
- `aio-clean-up`:
|
||||
Cleans up the builds directory by removing the artifacts that do not correspond to an open PR.
|
||||
_It is run as a daily cronjob._
|
||||
|
||||
- `aio-health-check`:
|
||||
Runs a basic health-check, verifying that the necessary services are running, the servers are
|
||||
responding and there is a working internet connection.
|
||||
_It is used periodically by docker for determining the container's health status._
|
||||
|
||||
- `aio-init`:
|
||||
Initializes the container (mainly by starting the necessary services).
|
||||
_It is run (by default) when starting the container._
|
||||
|
||||
- `aio-upload-server-prod`:
|
||||
Spins up a Node.js upload-server instance.
|
||||
_It is used in `aio-init` (see above) during initialization._
|
||||
|
||||
- `aio-upload-server-test`:
|
||||
Spins up a Node.js upload-server instance for tests.
|
||||
_It is used in `aio-verify-setup` (see below) for running tests._
|
||||
|
||||
- `aio-verify-setup`:
|
||||
Runs a suite of e2e-like tests, mainly verifying the correct (inter)operation of nginx and the
|
||||
Node.js upload-server.
|
116
aio/aio-builds-setup/docs/overview--security-model.md
Normal file
116
aio/aio-builds-setup/docs/overview--security-model.md
Normal file
@ -0,0 +1,116 @@
|
||||
# Overview - Security model
|
||||
|
||||
|
||||
Whenever a PR job is run on Travis, we want to build `angular.io` and upload the build artifacts to
|
||||
a publicly accessible server so that collaborators (developers, designers, authors, etc) can preview
|
||||
the changes without having to check out and build the app locally.
|
||||
|
||||
This document discusses the security considerations associated with uploading build artifacts as
|
||||
part of the CI setup and serving them publicly.
|
||||
|
||||
|
||||
## Security objectives
|
||||
|
||||
- **Prevent uploading arbitrary content to our servers.**
|
||||
Since there is no restriction on who can submit a PR, we cannot allow any PR's build artifacts to
|
||||
be uploaded.
|
||||
|
||||
- **Prevent overwriting other people's uploaded content.**
|
||||
There needs to be a mechanism in place to ensure that the uploaded content does indeed correspond
|
||||
to the PR indicated by its URL.
|
||||
|
||||
- **Prevent arbitrary access on the server.**
|
||||
Since the PR author has full control over the build artifacts that would be uploaded, we must
|
||||
ensure that the uploaded files will not enable arbitrary access to the server or expose sensitive
|
||||
info.
|
||||
|
||||
|
||||
## Issues / Caveats
|
||||
|
||||
- Because the PR author can change the scripts run on CI, any security mechanisms must be immune to
|
||||
such changes.
|
||||
|
||||
- For security reasons, encrypted Travis variables are not available to PRs, so we can't rely on
|
||||
them to implement security.
|
||||
|
||||
|
||||
## Implemented approach
|
||||
|
||||
|
||||
### In a nutshell
|
||||
The implemented approach can be broken up into the following sub-tasks:
|
||||
|
||||
1. Verify which PR the uploaded artifacts correspond to.
|
||||
2. Determine the author of the PR.
|
||||
3. Check whether the PR author is a member of some whitelisted GitHub team.
|
||||
4. Deploy the artifacts to the corresponding PR's directory.
|
||||
5. Prevent overwriting previously deployed artifacts (which ensures that the guarantees established
|
||||
during deployment will remain valid until the artifacts are removed).
|
||||
6. Prevent uploaded files from accessing anything outside their directory.
|
||||
|
||||
|
||||
### Implementation details
|
||||
This section describes how each of the aforementioned sub-tasks is accomplished:
|
||||
|
||||
1. **Verify which PR the uploaded artifacts correspond to.**
|
||||
|
||||
We are taking advantage of Travis' [JWT addon](https://docs.travis-ci.com/user/jwt). By sharing
|
||||
a secret between Travis (which keeps it private but uses it to sign a JWT) and the server (which
|
||||
uses it to verify the authenticity of the JWT), we can accomplish the following:
|
||||
a. Verify that the upload request comes from Travis.
|
||||
b. Determine the PR that these artifacts correspond to (since Travis puts that information into
|
||||
the JWT, without the PR author being able to modify it).
|
||||
|
||||
_Note:_
|
||||
_There are currently certain limitations in the implementation of the JWT addon._
|
||||
_See the next section for more details. (A minimal verification sketch is also shown after this list.)_
|
||||
|
||||
2. **Determine the author of the PR.**
|
||||
|
||||
Once we have securely associated the uploaded artifacts with a PR, we retrieve the PR's metadata -
|
||||
including the author's username - using the [GitHub API](https://developer.github.com/v3/).
|
||||
To avoid rate-limit restrictions, we use a Personal Access Token (issued by
|
||||
[@mary-poppins](https://github.com/mary-poppins)).
|
||||
|
||||
3. **Check whether the PR author is a member of some whitelisted GitHub team.**
|
||||
|
||||
Again using the GitHub API, we can verify the author's membership in one of the
|
||||
whitelisted/trusted GitHub teams. For this operation, we need a Personal Access Token with the
|
||||
`read:org` scope issued by a user that can "see" the specified GitHub organization.
|
||||
Here too, we use a token issued by @mary-poppins.
|
||||
|
||||
4. **Deploy the artifacts to the corresponding PR's directory.**
|
||||
|
||||
With the preceding steps, we have verified that the artifacts have been uploaded by
|
||||
Travis and correspond to a PR whose author is a member of a trusted team. Essentially, as long as
|
||||
sub-tasks 1, 2 and 3 can be securely accomplished, it is possible to "project" the trust we have
|
||||
in a team's members through the PR and Travis to the build artifacts.
|
||||
|
||||
5. **Prevent overwriting previously deployed artifacts**.
|
||||
|
||||
In order to enforce this restriction (and ensure that the deployed artifacts' validity is
|
||||
preserved throughout their "lifetime"), the server that handles the upload (currently a Node.js
|
||||
Express server) rejects uploads that target an existing directory.
|
||||
_Note: A PR can contain multiple uploads; one for each SHA that was built on Travis._
|
||||
|
||||
6. **Prevent uploaded files from accessing anything outside their directory.**
|
||||
|
||||
Nginx (which is used to serve the uploaded artifacts) has been configured to not follow symlinks
|
||||
outside of the directory where the build artifacts are stored.
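The sketch below illustrates sub-task 1 above. It assumes the widely used `jsonwebtoken` package (the `jwt expired` message seen in the `BuildVerifier` spec is consistent with that package, but the exact library is an assumption); the `slug` and `pull-request` claim names are taken from the spec earlier in this diff:

```
import * as jwt from 'jsonwebtoken';

// Illustrative only. Assumes the `jsonwebtoken` package; claim names (`slug`,
// `pull-request`) are taken from the BuildVerifier spec. Wrapping errors into
// UploadError instances (as the spec shows) is omitted here.
const verifyUploadToken = (token: string, secret: string, repoSlug: string, pr: number): void => {
  const payload = jwt.verify(token, secret) as {'pull-request': number, slug: string};

  if (payload.slug !== repoSlug) {
    throw new Error(`jwt slug invalid. expected: ${repoSlug}`);
  }
  if (payload['pull-request'] !== pr) {
    throw new Error(`jwt pull-request invalid. expected: ${pr}`);
  }
};
```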
|
||||
|
||||
|
||||
## Assumptions / Things to keep in mind
|
||||
|
||||
- Each trusted PR author has full control over the content that is uploaded for their PRs. Part of
|
||||
the security model relies on the trustworthiness of these authors.
|
||||
|
||||
- If anyone gets access to the `PREVIEW_DEPLOYMENT_TOKEN` (a.k.a. `NGBUILDS_IO_KEY` on
|
||||
angular/angular) variable generated for each Travis job, they will be able to impersonate the
|
||||
corresponding PR's author on the preview server for as long as the token is valid (currently 90
|
||||
mins). Because of this, the value of the `PREVIEW_DEPLOYMENT_TOKEN` should not be made publicly
|
||||
accessible (e.g. by printing it on the Travis job log).
|
||||
|
||||
- Travis only allows specific whitelisted property names to be used with the JWT addon. The only
|
||||
known such property at the time is `SAUCE_ACCESS_KEY` (used for integration with SauceLabs). In
|
||||
order to be able to actually use the JWT addon we had to name the encrypted variable
|
||||
`SAUCE_ACCESS_KEY` (which we later re-assign to `NGBUILDS_IO_KEY`).
|
@ -0,0 +1,20 @@
|
||||
# VM setup - Attach persistent disk
|
||||
|
||||
|
||||
## Create `aio-builds` persistent disk (if it does not already exist)
|
||||
- Follow instructions [here](https://cloud.google.com/compute/docs/disks/add-persistent-disk#create_disk).
|
||||
- `sudo mkfs.ext4 -F -E lazy_itable_init=0,lazy_journal_init=0,discard /dev/disk/by-id/google-aio-builds`
|
||||
|
||||
|
||||
## Mount disk
|
||||
- `sudo mkdir -p /mnt/disks/aio-builds`
|
||||
- `sudo mount -o discard,defaults /dev/disk/by-id/google-aio-builds /mnt/disks/aio-builds`
|
||||
- `sudo chmod a+w /mnt/disks/aio-builds`
|
||||
|
||||
|
||||
## Mount disk on boot
|
||||
- Run:
|
||||
```
|
||||
echo UUID=`sudo blkid -s UUID -o value /dev/disk/by-id/google-aio-builds` \
|
||||
/mnt/disks/aio-builds ext4 discard,defaults,nofail 0 2 | sudo tee -a /etc/fstab
|
||||
```
|
32
aio/aio-builds-setup/docs/vm-setup--create-docker-image.md
Normal file
32
aio/aio-builds-setup/docs/vm-setup--create-docker-image.md
Normal file
@ -0,0 +1,32 @@
|
||||
# VM setup - Create docker image
|
||||
|
||||
|
||||
## Checkout repository
|
||||
- `git clone <repo-url>`
|
||||
|
||||
|
||||
## Build docker image
|
||||
- `<aio-builds-setup-dir>/scripts/build.sh [<name>[:<tag>] [--build-arg <NAME>=<value> ...]]`
|
||||
- You can override the default environment variables inside the image by passing new values using
|
||||
`--build-arg`.
|
||||
|
||||
**Note:** The build script has to execute docker commands with `sudo`.
|
||||
|
||||
|
||||
## Example
|
||||
The following commands would create a docker image from GitHub repo `foo/bar` to be deployed on the
|
||||
`foobar-builds.io` domain and accepting PR deployments from authors that are members of the
|
||||
`bar-core` and `bar-docs-authors` teams of organization `foo`:
|
||||
|
||||
- `git clone https://github.com/foo/bar.git foobar`
|
||||
- Run:
|
||||
```
|
||||
./foobar/aio-builds-setup/scripts/build.sh foobar-builds \
|
||||
--build-arg AIO_REPO_SLUG=foo/bar \
|
||||
--build-arg AIO_DOMAIN_NAME=foobar-builds.io \
|
||||
--build-arg AIO_GITHUB_ORGANIZATION=foo \
|
||||
--build-arg AIO_GITHUB_TEAM_SLUGS=bar-core,bar-docs-authors
|
||||
```
|
||||
|
||||
A full list of the available environment variables can be found
|
||||
[here](image-config--environment-variables.md).
|
@ -0,0 +1,74 @@
|
||||
# VM setup - Create host directories and files
|
||||
|
||||
|
||||
## Create directory with secrets
|
||||
For security reasons, sensitive info (such as tokens and passwords) is not hardcoded into the
|
||||
docker image, nor passed as environment variables at runtime. They are passed to the docker
|
||||
container from the host VM as files inside a directory. Each file's name is the name of the variable
|
||||
and the file content is the value. These are read from inside the running container when necessary.
|
||||
|
||||
More info on how to create `secrets` directory and files can be found
|
||||
[here](vm-setup--set-up-secrets.md).
|
||||
|
||||
|
||||
## Create directory for build artifacts
|
||||
The uploaded build artifacts should be kept in a directory outside the docker container, so it is
|
||||
easier to replace the container without losing the uploaded builds. For portability across VMs a
|
||||
persistent disk can be used (as described [here](vm-setup--attach-persistent-disk.md)).
|
||||
|
||||
**Note:** The directories created inside that directory will be owned by user `www-data`.
|
||||
|
||||
|
||||
## Create SSL certificates (Optional for dev)
|
||||
The host VM can attach a directory containing the SSL certificate and key to be used by the nginx
|
||||
server for serving the uploaded build artifacts. More info on how to attach the directory when
|
||||
starting the container can be found [here](vm-setup--start-docker-container.md).
|
||||
|
||||
In order for the container to be able to find the certificate and key, they should be named
|
||||
`<DOMAIN_NAME>.crt` and `<DOMAIN_NAME>.key` respectively. For example, for a domain name
|
||||
`ngbuilds.io`, nginx will look for files `ngbuilds.io.crt` and `ngbuilds.io.key`. For more info on how to
|
||||
specify the domain name, see [here](vm-setup--create-docker-image.md).
|
||||
|
||||
If no directory is attached, nginx will use an internal self-signed certificate. This is convenient
|
||||
during development, but is not suitable for production.
|
||||
|
||||
**Note:**
|
||||
Since nginx needs to be able to serve requests for both the main domain as well as any subdomain
|
||||
(e.g. `ngbuilds.io/` and `foo-bar.ngbuilds.io/`), the provided certificate needs to be a wildcard
|
||||
certificate covering both the domain and subdomains.
|
||||
|
||||
|
||||
## Create directory for logs (Optional)
|
||||
Optionally, a logs directory can be passed to the docker container for storing non-system-related
|
||||
logs. If not provided, the logs are kept locally on the container and will be lost whenever the
|
||||
container is replaced (e.g. when updating to use a newer version of the docker image).
|
||||
|
||||
The following log files are kept in this directory:
|
||||
|
||||
- `clean-up.log`:
|
||||
Output of the `aio-clean-up` command, run as a cronjob for cleaning up the build artifacts of
|
||||
closed PRs.
|
||||
|
||||
- `init.log`:
|
||||
Output of the `aio-init` command, run (by default) when starting the container.
|
||||
|
||||
- `nginx/{access,error}.log`:
|
||||
The access and error logs produced by the nginx server while serving "production" files.
|
||||
|
||||
- `nginx-test/{access,error}.log`:
|
||||
The access and error logs produced by the nginx server while serving "test" files. This is only
|
||||
used when running tests locally from inside the container, e.g. with the `aio-verify-setup`
|
||||
command. (See [here](overview--scripts-and-commands.md) for more info.)
|
||||
|
||||
- `upload-server-{prod,test,verify-setup}-*.log`:
|
||||
The logs produced by the Node.js upload-server while serving either:
|
||||
- `-prod`: "Production" files (e.g. during normal operation).
|
||||
- `-test`: "Test" files (e.g. when a test instance is started with the `aio-upload-server-test`
|
||||
command).
|
||||
- `-verify-setup`: "Test" files, but while running `aio-verify-setup`.
|
||||
|
||||
(See [here](overview--scripts-and-commands.md) for more info on the commands mentioned above.)
|
||||
|
||||
- `verify-setup.log`:
|
||||
The output of the `aio-verify-setup` command (e.g. Jasmine output), except for upload-server
|
||||
output which is logged to `upload-server-verify-setup-*.log` (see above).
|
35
aio/aio-builds-setup/docs/vm-setup--set-up-docker.md
Normal file
35
aio/aio-builds-setup/docs/vm-setup--set-up-docker.md
Normal file
@ -0,0 +1,35 @@
|
||||
# VM Setup - Set up docker
|
||||
|
||||
|
||||
## Install docker
|
||||
|
||||
_Debian (jessie):_
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get install -y apt-transport-https ca-certificates curl git software-properties-common`
|
||||
- `curl -fsSL https://apt.dockerproject.org/gpg | sudo apt-key add -`
|
||||
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
|
||||
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ debian-$(lsb_release -cs) main"`
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get -y install docker-engine`
|
||||
|
||||
_Ubuntu (16.04):_
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get install -y curl git linux-image-extra-$(uname -r) linux-image-extra-virtual`
|
||||
- `sudo apt-get install -y apt-transport-https ca-certificates`
|
||||
- `curl -fsSL https://yum.dockerproject.org/gpg | sudo apt-key add -`
|
||||
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
|
||||
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ ubuntu-$(lsb_release -cs) main"`
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get -y install docker-engine`
|
||||
|
||||
|
||||
## Start docker
|
||||
- `sudo service docker start`
|
||||
|
||||
|
||||
## Test docker
|
||||
- `sudo docker run hello-world`
|
||||
|
||||
|
||||
## Start docker on boot
|
||||
- `sudo systemctl enable docker`
|
52
aio/aio-builds-setup/docs/vm-setup--set-up-secrets.md
Normal file
52
aio/aio-builds-setup/docs/vm-setup--set-up-secrets.md
Normal file
@ -0,0 +1,52 @@
|
||||
# VM Setup - Set up secrets
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
Necessary secrets:
|
||||
|
||||
1. `GITHUB_TOKEN`
|
||||
- Used for:
|
||||
- Retrieving open PRs without rate-limiting.
|
||||
- Retrieving PR author.
|
||||
- Retrieving members of the `angular-core` team.
|
||||
- Posting comments with preview links on PRs.
|
||||
|
||||
2. `PREVIEW_DEPLOYMENT_TOKEN`
|
||||
- Used for:
|
||||
- Decoding the JWT tokens received with `/create-build` requests.
|
||||
|
||||
**Note:**
|
||||
`TEST_GITHUB_TOKEN` and `TEST_PREVIEW_DEPLOYMENT_TOKEN` can also be created similarly to their
|
||||
non-TEST counterparts and they will be loaded when running `aio-verify-setup`, but it is currently
|
||||
not clear if/how they can be used in tests.
|
||||
|
||||
|
||||
## Create secrets
|
||||
|
||||
1. `GITHUB_TOKEN`
|
||||
- Visit https://github.com/settings/tokens.
|
||||
- Generate new token with the `public_repo` scope.
|
||||
|
||||
2. `PREVIEW_DEPLOYMENT_TOKEN`
|
||||
- Just generate a hard-to-guess character sequence.
|
||||
- Add it to `.travis.yml` under `addons -> jwt -> secure`.
|
||||
Can be added automatically with: `travis encrypt --add addons.jwt PREVIEW_DEPLOYMENT_TOKEN=<access-key>`
|
||||
|
||||
**Note:**
|
||||
Due to [travis-ci/travis-ci#7223](https://github.com/travis-ci/travis-ci/issues/7223) it is not
|
||||
currently possible to use the JWT addon (as described above) for anything other than the
|
||||
`SAUCE_ACCESS_KEY` variable. You can get creative, though...
|
||||
|
||||
**WARNING**
|
||||
To avoid arbitrary uploads, make sure the `PREVIEW_DEPLOYMENT_TOKEN` is NOT printed in the Travis log.
|
||||
|
||||
|
||||
## Save secrets on the VM
|
||||
|
||||
- `sudo mkdir /aio-secrets`
|
||||
- `sudo touch /aio-secrets/GITHUB_TOKEN`
|
||||
- Insert `<github-token>` into `/aio-secrets/GITHUB_TOKEN`.
|
||||
- `sudo touch /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN`
|
||||
- Insert `<access-token>` into `/aio-secrets/PREVIEW_DEPLOYMENT_TOKEN`.
|
||||
- `sudo chmod 400 /aio-secrets/*`
|
@ -0,0 +1,92 @@
|
||||
# VM setup - Start docker container
|
||||
|
||||
|
||||
## The `docker run` command
|
||||
Once everything has been setup and configured, a docker container can be started with the following
|
||||
command:
|
||||
|
||||
```
|
||||
sudo docker run \
|
||||
-d \
|
||||
--dns 127.0.0.1 \
|
||||
--name <instance-name> \
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
--restart unless-stopped \
|
||||
[-v <host-cert-dir>:/etc/ssl/localcerts:ro] \
|
||||
-v <host-secrets-dir>:/aio-secrets:ro \
|
||||
-v <host-builds-dir>:/var/www/aio-builds \
|
||||
[-v <host-logs-dir>:/var/log/aio] \
|
||||
<name>[:<tag>]
|
||||
```
|
||||
|
||||
Below is the same command with inline comments explaining each option. The API docs for `docker run`
|
||||
can be found [here](https://docs.docker.com/engine/reference/run/).
|
||||
|
||||
```
|
||||
sudo docker run \
|
||||
|
||||
# Start as a daemon.
|
||||
-d \
|
||||
|
||||
# Use the local DNS server.
|
||||
# (This is necessary for mapping internal URLs, e.g. for the Node.js upload-server.)
|
||||
--dns 127.0.0.1 \
|
||||
|
||||
# Use `<instance-name>` as an alias for the container.
|
||||
# Useful for running `docker` commands, e.g.: `docker stop <instance-name>`
|
||||
--name <instance-name> \
|
||||
|
||||
# Map ports of the host VM (left) to ports of the docker container (right).
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
|
||||
# Automatically restart the container (unless it was explicitly stopped by the user).
|
||||
# (This ensures that the container will be automatically started on boot.)
|
||||
--restart unless-stopped \
|
||||
|
||||
# The directory that contains the SSL certificates.
|
||||
# (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
|
||||
# If not provided, the container will use self-signed certificates.
|
||||
[-v <host-cert-dir>:/etc/ssl/localcerts:ro] \
|
||||
|
||||
# The directory that contains the secrets (e.g. GitHub token, JWT secret, etc).
|
||||
# (See [here](vm-setup--set-up-secrets.md) for more info.)
|
||||
-v <host-secrets-dir>:/aio-secrets:ro \
|
||||
|
||||
# The uploaded build artifacts will be stored in and served from this directory.
|
||||
# (If you are using a persistent disk - as described [here](vm-setup--attach-persistent-disk.md) -
|
||||
# this will be a directory inside the disk.)
|
||||
-v <host-builds-dir>:/var/www/aio-builds \
|
||||
|
||||
# The directory where the logs are being kept.
|
||||
# (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
|
||||
# If not provided, the logs will be kept inside the container, which means they will be lost
|
||||
# whenever a new container is created.
|
||||
[-v <host-logs-dir>:/var/log/aio] \
|
||||
|
||||
# The name of the docker image to use (and an optional tag; defaults to `latest`).
|
||||
# (See [here](vm-setup--create-docker-image.md) for instructions on how to create the image.)
|
||||
<name>[:<tag>]
|
||||
```
|
||||
|
||||
|
||||
## Example
|
||||
The following command would start a docker container based on the previously created `foobar-builds`
|
||||
docker image, alias it as 'foobar-builds-1' and map predefined directories on the host VM to be used
|
||||
by the container for accessing secrets and SSL certificates and keeping the build artifacts and logs.
|
||||
|
||||
```
|
||||
sudo docker run \
|
||||
-d \
|
||||
--dns 127.0.0.1 \
|
||||
--name foobar-builds-1 \
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
--restart unless-stopped \
|
||||
-v /etc/ssl/localcerts:/etc/ssl/localcerts:ro \
|
||||
-v /foobar-secrets:/aio-secrets:ro \
|
||||
-v /mnt/disks/foobar-builds:/var/www/aio-builds \
|
||||
-v /foobar-logs:/var/log/aio \
|
||||
foobar-builds
|
||||
```
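Once the container is running, the `--name` alias can be used with everyday `docker` commands to manage it (a minimal sketch based on the example above):

```
sudo docker ps --filter name=foobar-builds-1   # check that the container is up
sudo docker logs --tail 50 -f foobar-builds-1  # follow its logs
sudo docker stop foobar-builds-1               # stop it explicitly
sudo docker start foobar-builds-1              # start it again
```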
|
16
aio/aio-builds-setup/scripts/build.sh
Executable file
@ -0,0 +1,16 @@
|
||||
#!/bin/bash
|
||||
set -eux -o pipefail
|
||||
|
||||
# Set up env
|
||||
source "`dirname $0`/env.sh"
|
||||
readonly defaultImageNameAndTag="aio-builds:latest"
|
||||
|
||||
# Build `scripts-js/`
|
||||
cd "$SCRIPTS_JS_DIR"
|
||||
yarn install
|
||||
yarn run build
|
||||
cd -
|
||||
|
||||
# Create docker image
|
||||
readonly nameAndOptionalTag=${1:-$defaultImageNameAndTag}
|
||||
sudo docker build --tag $nameAndOptionalTag ${@:2} $DOCKERBUILD_DIR
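# Usage sketch (the image name below is illustrative; extra arguments are forwarded to `docker build`):
#   ./build.sh                                  # builds `aio-builds:latest`
#   ./build.sh foobar-builds:latest --no-cache  # custom name/tag, without the build cache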
|
5
aio/aio-builds-setup/scripts/env.sh
Executable file
@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
readonly THIS_DIR=$(cd $(dirname $0); pwd)
|
||||
readonly DOCKERBUILD_DIR="$THIS_DIR/../dockerbuild"
|
||||
readonly SCRIPTS_JS_DIR="$DOCKERBUILD_DIR/scripts-js"
|
11
aio/aio-builds-setup/scripts/test.sh
Executable file
@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
set -eux -o pipefail
|
||||
|
||||
# Set up env
|
||||
source "`dirname $0`/env.sh"
|
||||
|
||||
# Test `scripts-js/`
|
||||
cd "$SCRIPTS_JS_DIR"
|
||||
yarn install
|
||||
yarn test
|
||||
cd -
|
13
aio/aio-builds-setup/scripts/travis-preverify-pr.sh
Executable file
@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
set -eux -o pipefail
|
||||
|
||||
# Set up env
|
||||
source "`dirname $0`/env.sh"
|
||||
|
||||
# Preverify PR
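# (`GITHUB_TEAM_MEMBERSHIP_CHECK_KEY` holds the GitHub token reversed; `rev` below restores the original value.)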
|
||||
AIO_GITHUB_ORGANIZATION="angular" \
|
||||
AIO_GITHUB_TEAM_SLUGS="angular-core,aio-contributors" \
|
||||
AIO_GITHUB_TOKEN=$(echo ${GITHUB_TEAM_MEMBERSHIP_CHECK_KEY} | rev) \
|
||||
AIO_REPO_SLUG=$TRAVIS_REPO_SLUG \
|
||||
AIO_PREVERIFY_PR=$TRAVIS_PULL_REQUEST \
|
||||
node "$SCRIPTS_JS_DIR/dist/lib/upload-server/index-preverify-pr"
|
17
aio/content/examples/.gitignore
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
# _boilerplate files
|
||||
!_boilerplate/*
|
||||
*/*/src/styles.css
|
||||
*/*/src/systemjs.config.js
|
||||
*/*/src/tsconfig.json
|
||||
*/*/bs-config.e2e.json
|
||||
*/*/bs-config.json
|
||||
*/*/package.json
|
||||
*/*/tslint.json
|
||||
|
||||
# example files
|
||||
_test-output
|
||||
protractor-helpers.js
|
||||
*/e2e-spec.js
|
||||
**/ts/**/*.js
|
||||
**/js-es6*/**/*.js
|
||||
**/ts-snippets/**/*.js
|
14
aio/content/examples/_boilerplate/bs-config.e2e.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"open": false,
|
||||
"logLevel": "silent",
|
||||
"port": 8080,
|
||||
"server": {
|
||||
"baseDir": "src",
|
||||
"routes": {
|
||||
"/node_modules": "node_modules"
|
||||
},
|
||||
"middleware": {
|
||||
"0": null
|
||||
}
|
||||
}
|
||||
}
|
8
aio/content/examples/_boilerplate/bs-config.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"server": {
|
||||
"baseDir": "src",
|
||||
"routes": {
|
||||
"/node_modules": "node_modules"
|
||||
}
|
||||
}
|
||||
}
|
4
aio/content/examples/_boilerplate/example-config.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"build": "build",
|
||||
"run": "serve"
|
||||
}
|
44
aio/content/examples/_boilerplate/package.json
Normal file
@ -0,0 +1,44 @@
|
||||
{
|
||||
"name": "angular-examples",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"description": "Example package.json, only contains needed scripts for examples. See _examples/package.json for master package.json.",
|
||||
"scripts": {
|
||||
"build": "tsc -p src/",
|
||||
"build:watch": "tsc -p src/ -w",
|
||||
"build:e2e": "tsc -p e2e/",
|
||||
"serve": "lite-server -c=bs-config.json",
|
||||
"serve:e2e": "lite-server -c=bs-config.e2e.json",
|
||||
"prestart": "npm run build",
|
||||
"start": "concurrently \"npm run build:watch\" \"npm run serve\"",
|
||||
"pree2e": "webdriver-manager update && npm run build:e2e",
|
||||
"e2e": "concurrently \"npm run serve:e2e\" \"npm run protractor\" --kill-others --success first",
|
||||
"protractor": "protractor protractor.config.js",
|
||||
"pretest": "npm run build",
|
||||
"test": "concurrently \"npm run build:watch\" \"karma start karma.conf.js\"",
|
||||
"pretest:once": "npm run build",
|
||||
"test:once": "karma start karma.conf.js --single-run",
|
||||
"lint": "tslint ./src/**/*.ts -t verbose",
|
||||
|
||||
"build:upgrade": "tsc",
|
||||
"serve:upgrade": "http-server",
|
||||
"build:cli": "ng build --no-progress",
|
||||
"serve:cli": "http-server dist/",
|
||||
"build:aot": "ngc -p tsconfig-aot.json && rollup -c rollup-config.js",
|
||||
"serve:aot": "lite-server -c bs-config.aot.json",
|
||||
"start:webpack": "webpack-dev-server --inline --progress --port 8080",
|
||||
"test:webpack": "karma start karma.webpack.conf.js",
|
||||
"build:webpack": "rimraf dist && webpack --config config/webpack.prod.js --bail",
|
||||
"build:babel": "babel src -d src --extensions \".es6\" --source-maps",
|
||||
"copy-dist-files": "node ./copy-dist-files.js",
|
||||
"i18n": "ng-xi18n"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"angular-cli": "^1.0.0-beta.26"
|
||||
},
|
||||
"repository": {}
|
||||
}
|
10
aio/content/examples/_boilerplate/plnkr.json
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"description": "QuickStart",
|
||||
"basePath": "src/",
|
||||
"files": [
|
||||
"app/app.component.ts",
|
||||
"index.html"
|
||||
],
|
||||
"open": "app/app.component.ts",
|
||||
"tags": ["quickstart"]
|
||||
}
|
116
aio/content/examples/_boilerplate/src/styles.css
Normal file
@ -0,0 +1,116 @@
|
||||
/* #docregion , quickstart, toh */
|
||||
/* Master Styles */
|
||||
h1 {
|
||||
color: #369;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-size: 250%;
|
||||
}
|
||||
h2, h3 {
|
||||
color: #444;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-weight: lighter;
|
||||
}
|
||||
body {
|
||||
margin: 2em;
|
||||
}
|
||||
/* #enddocregion quickstart */
|
||||
body, input[text], button {
|
||||
color: #888;
|
||||
font-family: Cambria, Georgia;
|
||||
}
|
||||
/* #enddocregion toh */
|
||||
a {
|
||||
cursor: pointer;
|
||||
cursor: hand;
|
||||
}
|
||||
button {
|
||||
font-family: Arial;
|
||||
background-color: #eee;
|
||||
border: none;
|
||||
padding: 5px 10px;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
cursor: hand;
|
||||
}
|
||||
button:hover {
|
||||
background-color: #cfd8dc;
|
||||
}
|
||||
button:disabled {
|
||||
background-color: #eee;
|
||||
color: #aaa;
|
||||
cursor: auto;
|
||||
}
|
||||
|
||||
/* Navigation link styles */
|
||||
nav a {
|
||||
padding: 5px 10px;
|
||||
text-decoration: none;
|
||||
margin-right: 10px;
|
||||
margin-top: 10px;
|
||||
display: inline-block;
|
||||
background-color: #eee;
|
||||
border-radius: 4px;
|
||||
}
|
||||
nav a:visited, a:link {
|
||||
color: #607D8B;
|
||||
}
|
||||
nav a:hover {
|
||||
color: #039be5;
|
||||
background-color: #CFD8DC;
|
||||
}
|
||||
nav a.active {
|
||||
color: #039be5;
|
||||
}
|
||||
|
||||
/* items class */
|
||||
.items {
|
||||
margin: 0 0 2em 0;
|
||||
list-style-type: none;
|
||||
padding: 0;
|
||||
width: 24em;
|
||||
}
|
||||
.items li {
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
left: 0;
|
||||
background-color: #EEE;
|
||||
margin: .5em;
|
||||
padding: .3em 0;
|
||||
height: 1.6em;
|
||||
border-radius: 4px;
|
||||
}
|
||||
.items li:hover {
|
||||
color: #607D8B;
|
||||
background-color: #DDD;
|
||||
left: .1em;
|
||||
}
|
||||
.items li.selected {
|
||||
background-color: #CFD8DC;
|
||||
color: white;
|
||||
}
|
||||
.items li.selected:hover {
|
||||
background-color: #BBD8DC;
|
||||
}
|
||||
.items .text {
|
||||
position: relative;
|
||||
top: -3px;
|
||||
}
|
||||
.items .badge {
|
||||
display: inline-block;
|
||||
font-size: small;
|
||||
color: white;
|
||||
padding: 0.8em 0.7em 0 0.7em;
|
||||
background-color: #607D8B;
|
||||
line-height: 1em;
|
||||
position: relative;
|
||||
left: -1px;
|
||||
top: -4px;
|
||||
height: 1.8em;
|
||||
margin-right: .8em;
|
||||
border-radius: 4px 0 0 4px;
|
||||
}
|
||||
/* #docregion toh */
|
||||
/* everywhere else */
|
||||
* {
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
}
|
42
aio/content/examples/_boilerplate/src/systemjs.config.js
Normal file
@ -0,0 +1,42 @@
|
||||
/**
|
||||
* System configuration for Angular samples
|
||||
* Adjust as necessary for your application needs.
|
||||
*/
|
||||
(function (global) {
|
||||
System.config({
|
||||
paths: {
|
||||
// paths serve as alias
|
||||
'npm:': 'node_modules/'
|
||||
},
|
||||
// map tells the System loader where to look for things
|
||||
map: {
|
||||
// our app is within the app folder
|
||||
app: 'app',
|
||||
|
||||
// angular bundles
|
||||
'@angular/core': 'npm:@angular/core/bundles/core.umd.js',
|
||||
'@angular/common': 'npm:@angular/common/bundles/common.umd.js',
|
||||
'@angular/compiler': 'npm:@angular/compiler/bundles/compiler.umd.js',
|
||||
'@angular/platform-browser': 'npm:@angular/platform-browser/bundles/platform-browser.umd.js',
|
||||
'@angular/platform-browser-dynamic': 'npm:@angular/platform-browser-dynamic/bundles/platform-browser-dynamic.umd.js',
|
||||
'@angular/http': 'npm:@angular/http/bundles/http.umd.js',
|
||||
'@angular/router': 'npm:@angular/router/bundles/router.umd.js',
|
||||
'@angular/router/upgrade': 'npm:@angular/router/bundles/router-upgrade.umd.js',
|
||||
'@angular/forms': 'npm:@angular/forms/bundles/forms.umd.js',
|
||||
|
||||
// other libraries
|
||||
'rxjs': 'npm:rxjs',
|
||||
'angular-in-memory-web-api': 'npm:angular-in-memory-web-api/bundles/in-memory-web-api.umd.js'
|
||||
},
|
||||
// packages tells the System loader how to load when no filename and/or no extension
|
||||
packages: {
|
||||
app: {
|
||||
main: './main.js',
|
||||
defaultExtension: 'js'
|
||||
},
|
||||
rxjs: {
|
||||
defaultExtension: 'js'
|
||||
}
|
||||
}
|
||||
});
|
||||
})(this);
|
@ -0,0 +1,88 @@
|
||||
/**
|
||||
* WEB VERSION FOR CURRENT ANGULAR BUILD
|
||||
* (based on systemjs.config.js in angular.io)
|
||||
* System configuration for Angular samples
|
||||
* Adjust as necessary for your application needs.
|
||||
*
|
||||
* UNTESTED !
|
||||
*/
|
||||
(function (global) {
|
||||
System.config({
|
||||
// DEMO ONLY! REAL CODE SHOULD NOT TRANSPILE IN THE BROWSER
|
||||
transpiler: 'ts',
|
||||
typescriptOptions: {
|
||||
// Copy of compiler options in standard tsconfig.json
|
||||
"target": "es5",
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"lib": ["es2015", "dom"],
|
||||
"noImplicitAny": true,
|
||||
"suppressImplicitAnyIndexErrors": true
|
||||
},
|
||||
meta: {
|
||||
'typescript': {
|
||||
"exports": "ts"
|
||||
}
|
||||
},
|
||||
paths: {
|
||||
// paths serve as alias
|
||||
'npm:': 'https://unpkg.com/',
|
||||
'ng:': 'https://cdn.rawgit.com/angular/'
|
||||
},
|
||||
// map tells the System loader where to look for things
|
||||
map: {
|
||||
// our app is within the app folder
|
||||
app: 'app',
|
||||
|
||||
// angular bundles
|
||||
'@angular/core': 'ng:core-builds/master/bundles/core.umd.js',
|
||||
'@angular/common': 'ng:common-builds/master/bundles/common.umd.js',
|
||||
'@angular/compiler': 'ng:compiler-builds/master/bundles/compiler.umd.js',
|
||||
'@angular/platform-browser': 'ng:platform-browser-builds/master/bundles/platform-browser.umd.js',
|
||||
'@angular/platform-browser-dynamic': 'ng:platform-browser-dynamic-builds/master/bundles/platform-browser-dynamic.umd.js',
|
||||
'@angular/http': 'ng:http-builds/master/bundles/http.umd.js',
|
||||
'@angular/router': 'ng:router-builds/master/bundles/router.umd.js',
|
||||
'@angular/router/upgrade': 'ng:router-builds/master/bundles/router-upgrade.umd.js',
|
||||
'@angular/forms': 'ng:forms-builds/master/bundles/forms.umd.js',
|
||||
'@angular/upgrade': 'ng:upgrade-builds/master/bundles/upgrade.umd.js',
|
||||
'@angular/upgrade/static': 'ng:upgrade-builds/master/bundles/upgrade-static.umd.js',
|
||||
|
||||
// angular testing umd bundles (overwrite the shim mappings)
|
||||
'@angular/core/testing': 'ng:core-builds/master/bundles/core-testing.umd.js',
|
||||
'@angular/common/testing': 'ng:common-builds/master/bundles/common-testing.umd.js',
|
||||
'@angular/compiler/testing': 'ng:compiler-builds/master/bundles/compiler-testing.umd.js',
|
||||
'@angular/platform-browser/testing': 'ng:platform-browser-builds/master/bundles/platform-browser-testing.umd.js',
|
||||
'@angular/platform-browser-dynamic/testing': 'ng:platform-browser-dynamic-builds/master/bundles/platform-browser-dynamic-testing.umd.js',
|
||||
'@angular/http/testing': 'ng:http-builds/master/bundles/http-testing.umd.js',
|
||||
'@angular/router/testing': 'ng:router-builds/master/bundles/router-testing.umd.js',
|
||||
'@angular/forms/testing': 'ng:forms-builds/master/bundles/forms-testing.umd.js',
|
||||
|
||||
// other libraries
|
||||
'rxjs': 'npm:rxjs@5.0.1',
|
||||
'angular-in-memory-web-api': 'npm:angular-in-memory-web-api/bundles/in-memory-web-api.umd.js',
|
||||
'ts': 'npm:plugin-typescript@5.2.7/lib/plugin.js',
|
||||
'typescript': 'npm:typescript@2.0.10/lib/typescript.js',
|
||||
|
||||
},
|
||||
// packages tells the System loader how to load when no filename and/or no extension
|
||||
packages: {
|
||||
app: {
|
||||
main: './main.ts',
|
||||
defaultExtension: 'ts'
|
||||
},
|
||||
rxjs: {
|
||||
defaultExtension: 'js'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
})(this);
|
||||
|
||||
/*
|
||||
Copyright 2016 Google Inc. All Rights Reserved.
|
||||
Use of this source code is governed by an MIT-style license that
|
||||
can be found in the LICENSE file at http://angular.io/license
|
||||
*/
|
75
aio/content/examples/_boilerplate/src/systemjs.config.web.js
Normal file
@ -0,0 +1,75 @@
|
||||
/**
|
||||
* WEB ANGULAR VERSION
|
||||
* (based on systemjs.config.js in angular.io)
|
||||
* System configuration for Angular samples
|
||||
* Adjust as necessary for your application needs.
|
||||
*/
|
||||
(function (global) {
|
||||
System.config({
|
||||
// DEMO ONLY! REAL CODE SHOULD NOT TRANSPILE IN THE BROWSER
|
||||
transpiler: 'ts',
|
||||
typescriptOptions: {
|
||||
// Copy of compiler options in standard tsconfig.json
|
||||
"target": "es5",
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"lib": ["es2015", "dom"],
|
||||
"noImplicitAny": true,
|
||||
"suppressImplicitAnyIndexErrors": true
|
||||
},
|
||||
meta: {
|
||||
'typescript': {
|
||||
"exports": "ts"
|
||||
}
|
||||
},
|
||||
paths: {
|
||||
// paths serve as alias
|
||||
'npm:': 'https://unpkg.com/'
|
||||
},
|
||||
// map tells the System loader where to look for things
|
||||
map: {
|
||||
// our app is within the app folder
|
||||
app: 'app',
|
||||
|
||||
// angular bundles
|
||||
'@angular/core': 'npm:@angular/core/bundles/core.umd.js',
|
||||
'@angular/common': 'npm:@angular/common/bundles/common.umd.js',
|
||||
'@angular/compiler': 'npm:@angular/compiler/bundles/compiler.umd.js',
|
||||
'@angular/platform-browser': 'npm:@angular/platform-browser/bundles/platform-browser.umd.js',
|
||||
'@angular/platform-browser-dynamic': 'npm:@angular/platform-browser-dynamic/bundles/platform-browser-dynamic.umd.js',
|
||||
'@angular/http': 'npm:@angular/http/bundles/http.umd.js',
|
||||
'@angular/router': 'npm:@angular/router/bundles/router.umd.js',
|
||||
'@angular/router/upgrade': 'npm:@angular/router/bundles/router-upgrade.umd.js',
|
||||
'@angular/forms': 'npm:@angular/forms/bundles/forms.umd.js',
|
||||
'@angular/upgrade': 'npm:@angular/upgrade/bundles/upgrade.umd.js',
|
||||
'@angular/upgrade/static': 'npm:@angular/upgrade/bundles/upgrade-static.umd.js',
|
||||
|
||||
// other libraries
|
||||
'rxjs': 'npm:rxjs@5.0.1',
|
||||
'angular-in-memory-web-api': 'npm:angular-in-memory-web-api/bundles/in-memory-web-api.umd.js',
|
||||
'ts': 'npm:plugin-typescript@5.2.7/lib/plugin.js',
|
||||
'typescript': 'npm:typescript@2.0.10/lib/typescript.js',
|
||||
|
||||
},
|
||||
// packages tells the System loader how to load when no filename and/or no extension
|
||||
packages: {
|
||||
app: {
|
||||
main: './main.ts',
|
||||
defaultExtension: 'ts'
|
||||
},
|
||||
rxjs: {
|
||||
defaultExtension: 'js'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
})(this);
|
||||
|
||||
/*
|
||||
Copyright 2016 Google Inc. All Rights Reserved.
|
||||
Use of this source code is governed by an MIT-style license that
|
||||
can be found in the LICENSE file at http://angular.io/license
|
||||
*/
|
21
aio/content/examples/_boilerplate/src/tsconfig.json
Normal file
@ -0,0 +1,21 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es5",
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"lib": [ "es2015", "dom" ],
|
||||
"noImplicitAny": true,
|
||||
"suppressImplicitAnyIndexErrors": true,
|
||||
"typeRoots": [
|
||||
"../../../node_modules/@types/"
|
||||
]
|
||||
},
|
||||
"compileOnSave": true,
|
||||
"exclude": [
|
||||
"node_modules/*",
|
||||
"**/*-aot.ts"
|
||||
]
|
||||
}
|
93
aio/content/examples/_boilerplate/tslint.json
Normal file
@ -0,0 +1,93 @@
|
||||
{
|
||||
"rules": {
|
||||
"class-name": true,
|
||||
"comment-format": [
|
||||
true,
|
||||
"check-space"
|
||||
],
|
||||
"curly": true,
|
||||
"eofline": true,
|
||||
"forin": true,
|
||||
"indent": [
|
||||
true,
|
||||
"spaces"
|
||||
],
|
||||
"label-position": true,
|
||||
"label-undefined": true,
|
||||
"max-line-length": [
|
||||
true,
|
||||
140
|
||||
],
|
||||
"member-access": false,
|
||||
"member-ordering": [
|
||||
true,
|
||||
"static-before-instance",
|
||||
"variables-before-functions"
|
||||
],
|
||||
"no-arg": true,
|
||||
"no-bitwise": true,
|
||||
"no-console": [
|
||||
true,
|
||||
"debug",
|
||||
"info",
|
||||
"time",
|
||||
"timeEnd",
|
||||
"trace"
|
||||
],
|
||||
"no-construct": true,
|
||||
"no-debugger": true,
|
||||
"no-duplicate-key": true,
|
||||
"no-duplicate-variable": true,
|
||||
"no-empty": false,
|
||||
"no-eval": true,
|
||||
"no-inferrable-types": true,
|
||||
"no-shadowed-variable": true,
|
||||
"no-string-literal": false,
|
||||
"no-switch-case-fall-through": true,
|
||||
"no-trailing-whitespace": true,
|
||||
"no-unused-expression": true,
|
||||
"no-unused-variable": true,
|
||||
"no-unreachable": true,
|
||||
"no-use-before-declare": true,
|
||||
"no-var-keyword": true,
|
||||
"object-literal-sort-keys": false,
|
||||
"one-line": [
|
||||
true,
|
||||
"check-open-brace",
|
||||
"check-catch",
|
||||
"check-else",
|
||||
"check-whitespace"
|
||||
],
|
||||
"quotemark": [
|
||||
true,
|
||||
"single"
|
||||
],
|
||||
"radix": true,
|
||||
"semicolon": [
|
||||
"always"
|
||||
],
|
||||
"triple-equals": [
|
||||
true,
|
||||
"allow-null-check"
|
||||
],
|
||||
"typedef-whitespace": [
|
||||
true,
|
||||
{
|
||||
"call-signature": "nospace",
|
||||
"index-signature": "nospace",
|
||||
"parameter": "nospace",
|
||||
"property-declaration": "nospace",
|
||||
"variable-declaration": "nospace"
|
||||
}
|
||||
],
|
||||
"variable-name": false,
|
||||
"whitespace": [
|
||||
true,
|
||||
"check-branch",
|
||||
"check-decl",
|
||||
"check-operator",
|
||||
"check-separator",
|
||||
"check-type"
|
||||
]
|
||||
}
|
||||
}
|
351
aio/content/examples/animations/e2e-spec.ts
Normal file
@ -0,0 +1,351 @@
|
||||
'use strict'; // necessary for es6 output in node
|
||||
|
||||
import { browser, element, by, ElementFinder } from 'protractor';
|
||||
import { logging, promise } from 'selenium-webdriver';
|
||||
|
||||
/**
|
||||
* The tests here are basically just checking that the end styles
|
||||
* of each animation are in effect.
|
||||
*
|
||||
* Relies on the Angular testability only becoming stable once
|
||||
* animation(s) have finished.
|
||||
*
|
||||
* Ideally we'd use https://developer.mozilla.org/en-US/docs/Web/API/Document/getAnimations
|
||||
* but they're not supported in Chrome at the moment. The upcoming nganimate polyfill
|
||||
* may also add some introspection support.
|
||||
*/
|
||||
describe('Animation Tests', () => {
|
||||
|
||||
const INACTIVE_COLOR = 'rgba(238, 238, 238, 1)';
|
||||
const ACTIVE_COLOR = 'rgba(207, 216, 220, 1)';
|
||||
const NO_TRANSFORM_MATRIX_REGEX = /matrix\(1,\s*0,\s*0,\s*1,\s*0,\s*0\)/;
|
||||
|
||||
beforeEach(() => {
|
||||
browser.get('');
|
||||
});
|
||||
|
||||
describe('basic states', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-basic'));
|
||||
});
|
||||
|
||||
it('animates between active and inactive', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(ACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('styles inline in transitions', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(function() {
|
||||
host = element(by.css('hero-list-inline-styles'));
|
||||
});
|
||||
|
||||
it('are not kept after animation', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('combined transition syntax', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-combined-transitions'));
|
||||
});
|
||||
|
||||
it('animates between active and inactive', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(ACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('two-way transition syntax', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-twoway'));
|
||||
});
|
||||
|
||||
it('animates between active and inactive', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(ACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('enter & leave', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-enter-leave'));
|
||||
});
|
||||
|
||||
it('adds and removes element', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
|
||||
removeHero();
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('enter & leave & states', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(function() {
|
||||
host = element(by.css('hero-list-enter-leave-states'));
|
||||
});
|
||||
|
||||
it('adds and removes and animates between active and inactive', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
|
||||
removeHero();
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('auto style calc', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(function() {
|
||||
host = element(by.css('hero-list-auto'));
|
||||
});
|
||||
|
||||
it('adds and removes element', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
expect(li.getCssValue('height')).toBe('50px');
|
||||
|
||||
removeHero();
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('different timings', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-timings'));
|
||||
});
|
||||
|
||||
it('adds and removes element', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
expect(li.getCssValue('opacity')).toMatch('1');
|
||||
|
||||
removeHero();
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('multiple keyframes', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-multistep'));
|
||||
});
|
||||
|
||||
it('adds and removes element', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
expect(li.getCssValue('opacity')).toMatch('1');
|
||||
|
||||
removeHero();
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('parallel groups', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-groups'));
|
||||
});
|
||||
|
||||
it('adds and removes element', () => {
|
||||
addInactiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
expect(li.getCssValue('transform')).toMatch(NO_TRANSFORM_MATRIX_REGEX);
|
||||
expect(li.getCssValue('opacity')).toMatch('1');
|
||||
|
||||
removeHero(700);
|
||||
expect(li.isPresent()).toBe(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('adding active heroes', () => {
|
||||
|
||||
let host: ElementFinder;
|
||||
|
||||
beforeEach(() => {
|
||||
host = element(by.css('hero-list-basic'));
|
||||
});
|
||||
|
||||
it('animates between active and inactive', () => {
|
||||
addActiveHero();
|
||||
|
||||
let li = host.element(by.css('li'));
|
||||
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(ACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.0);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(INACTIVE_COLOR);
|
||||
|
||||
li.click();
|
||||
browser.driver.sleep(300);
|
||||
expect(getScaleX(li)).toBe(1.1);
|
||||
expect(li.getCssValue('backgroundColor')).toBe(ACTIVE_COLOR);
|
||||
});
|
||||
});
|
||||
|
||||
describe('callbacks', () => {
|
||||
it('fires a callback on start and done', () => {
|
||||
addActiveHero();
|
||||
browser.manage().logs().get(logging.Type.BROWSER)
|
||||
.then((logs: logging.Entry[]) => {
|
||||
const animationMessages = logs.filter((log) => {
|
||||
return log.message.indexOf('Animation') !== -1 ? true : false;
|
||||
});
|
||||
|
||||
expect(animationMessages.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function addActiveHero(sleep?: number) {
|
||||
sleep = sleep || 500;
|
||||
element(by.buttonText('Add active hero')).click();
|
||||
browser.driver.sleep(sleep);
|
||||
}
|
||||
|
||||
function addInactiveHero(sleep?: number) {
|
||||
sleep = sleep || 500;
|
||||
element(by.buttonText('Add inactive hero')).click();
|
||||
browser.driver.sleep(sleep);
|
||||
}
|
||||
|
||||
function removeHero(sleep?: number) {
|
||||
sleep = sleep || 500;
|
||||
element(by.buttonText('Remove hero')).click();
|
||||
browser.driver.sleep(sleep);
|
||||
}
|
||||
|
||||
function getScaleX(el: ElementFinder) {
|
||||
return Promise.all([
|
||||
getBoundingClientWidth(el),
|
||||
getOffsetWidth(el)
|
||||
]).then(function(promiseResolutions) {
|
||||
let clientWidth = promiseResolutions[0];
|
||||
let offsetWidth = promiseResolutions[1];
|
||||
return clientWidth / offsetWidth;
|
||||
});
|
||||
}
|
||||
|
||||
function getBoundingClientWidth(el: ElementFinder): promise.Promise<number> {
|
||||
return browser.executeScript(
|
||||
'return arguments[0].getBoundingClientRect().width',
|
||||
el.getWebElement()
|
||||
);
|
||||
}
|
||||
|
||||
function getOffsetWidth(el: ElementFinder): promise.Promise<number> {
|
||||
return browser.executeScript(
|
||||
'return arguments[0].offsetWidth',
|
||||
el.getWebElement()
|
||||
);
|
||||
}
|
||||
});
|
1
aio/content/examples/animations/ts/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
**/*.js
|
8
aio/content/examples/animations/ts/plnkr.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"description": "Angular Animations",
|
||||
"basePath": "src/",
|
||||
"files":[
|
||||
"!**/*.d.ts",
|
||||
"!**/*.js"
|
||||
]
|
||||
}
|
1011
aio/content/examples/animations/ts/plnkr.no-link.html
Normal file
File diff suppressed because it is too large
33
aio/content/examples/animations/ts/src/app/app.module.ts
Normal file
@ -0,0 +1,33 @@
|
||||
import { NgModule } from '@angular/core';
|
||||
import { BrowserModule } from '@angular/platform-browser';
|
||||
|
||||
import { HeroTeamBuilderComponent } from './hero-team-builder.component';
|
||||
import { HeroListBasicComponent } from './hero-list-basic.component';
|
||||
import { HeroListInlineStylesComponent } from './hero-list-inline-styles.component';
|
||||
import { HeroListEnterLeaveComponent } from './hero-list-enter-leave.component';
|
||||
import { HeroListEnterLeaveStatesComponent } from './hero-list-enter-leave-states.component';
|
||||
import { HeroListCombinedTransitionsComponent } from './hero-list-combined-transitions.component';
|
||||
import { HeroListTwowayComponent } from './hero-list-twoway.component';
|
||||
import { HeroListAutoComponent } from './hero-list-auto.component';
|
||||
import { HeroListGroupsComponent } from './hero-list-groups.component';
|
||||
import { HeroListMultistepComponent } from './hero-list-multistep.component';
|
||||
import { HeroListTimingsComponent } from './hero-list-timings.component';
|
||||
|
||||
@NgModule({
|
||||
imports: [ BrowserModule ],
|
||||
declarations: [
|
||||
HeroTeamBuilderComponent,
|
||||
HeroListBasicComponent,
|
||||
HeroListInlineStylesComponent,
|
||||
HeroListCombinedTransitionsComponent,
|
||||
HeroListTwowayComponent,
|
||||
HeroListEnterLeaveComponent,
|
||||
HeroListEnterLeaveStatesComponent,
|
||||
HeroListAutoComponent,
|
||||
HeroListTimingsComponent,
|
||||
HeroListMultistepComponent,
|
||||
HeroListGroupsComponent
|
||||
],
|
||||
bootstrap: [ HeroTeamBuilderComponent ]
|
||||
})
|
||||
export class AppModule { }
|
@ -0,0 +1,46 @@
|
||||
import {
|
||||
Component,
|
||||
Input,
|
||||
trigger,
|
||||
state,
|
||||
style,
|
||||
animate,
|
||||
transition
|
||||
} from '@angular/core';
|
||||
|
||||
import { Heroes } from './hero.service';
|
||||
|
||||
@Component({
|
||||
moduleId: module.id,
|
||||
selector: 'hero-list-auto',
|
||||
// #docregion template
|
||||
template: `
|
||||
<ul>
|
||||
<li *ngFor="let hero of heroes"
|
||||
[@shrinkOut]="'in'">
|
||||
{{hero.name}}
|
||||
</li>
|
||||
</ul>
|
||||
`,
|
||||
// #enddocregion template
|
||||
styleUrls: ['./hero-list.component.css'],
|
||||
|
||||
/* When the element leaves (transition "in => void" occurs),
|
||||
* get the element's current computed height and animate
|
||||
* it down to 0.
|
||||
*/
|
||||
// #docregion animationdef
|
||||
animations: [
|
||||
trigger('shrinkOut', [
|
||||
state('in', style({height: '*'})),
|
||||
transition('* => void', [
|
||||
style({height: '*'}),
|
||||
animate(250, style({height: 0}))
|
||||
])
|
||||
])
|
||||
]
|
||||
// #enddocregion animationdef
|
||||
})
|
||||
export class HeroListAutoComponent {
|
||||
@Input() heroes: Heroes;
|
||||
}
|
@ -0,0 +1,69 @@
|
||||
// #docplaster
|
||||
// #docregion
|
||||
// #docregion imports
|
||||
import {
|
||||
Component,
|
||||
Input,
|
||||
trigger,
|
||||
state,
|
||||
style,
|
||||
transition,
|
||||
animate
|
||||
} from '@angular/core';
|
||||
// #enddocregion imports
|
||||
|
||||
import { Heroes } from './hero.service';
|
||||
|
||||
@Component({
|
||||
moduleId: module.id,
|
||||
selector: 'hero-list-basic',
|
||||
// #enddocregion
|
||||
/* The click event calls hero.toggleState(), which
|
||||
* causes the state of that hero to switch from
|
||||
* active to inactive or vice versa.
|
||||
*/
|
||||
// #docregion
|
||||
// #docregion template
|
||||
template: `
|
||||
<ul>
|
||||
<li *ngFor="let hero of heroes"
|
||||
[@heroState]="hero.state"
|
||||
(click)="hero.toggleState()">
|
||||
{{hero.name}}
|
||||
</li>
|
||||
</ul>
|
||||
`,
|
||||
// #enddocregion template
|
||||
styleUrls: ['./hero-list.component.css'],
|
||||
// #enddocregion
|
||||
/**
|
||||
* Define two states, "inactive" and "active", and the end
|
||||
* styles that apply whenever the element is in those states.
|
||||
* Then define animations for transitioning between the states,
|
||||
* one in each direction
|
||||
*/
|
||||
// #docregion
|
||||
// #docregion animationdef
|
||||
animations: [
|
||||
trigger('heroState', [
|
||||
// #docregion states
|
||||
state('inactive', style({
|
||||
backgroundColor: '#eee',
|
||||
transform: 'scale(1)'
|
||||
})),
|
||||
state('active', style({
|
||||
backgroundColor: '#cfd8dc',
|
||||
transform: 'scale(1.1)'
|
||||
})),
|
||||
// #enddocregion states
|
||||
// #docregion transitions
|
||||
transition('inactive => active', animate('100ms ease-in')),
|
||||
transition('active => inactive', animate('100ms ease-out'))
|
||||
// #enddocregion transitions
|
||||
])
|
||||
]
|
||||
// #enddocregion animationdef
|
||||
})
|
||||
export class HeroListBasicComponent {
|
||||
@Input() heroes: Heroes;
|
||||
}
|
Some files were not shown because too many files have changed in this diff.