Compare commits

..

182 Commits

Author SHA1 Message Date
268e9772d5 release: cut the v7.0.0-beta.5 release 2018-09-05 21:22:46 -07:00
e2c67ba155 docs: release notes for the v6.1.7 release 2018-09-05 21:18:53 -07:00
dcad0544d7 build: update ngcontainer to node 10.9.0 (#25812)
PR Close #25812
2018-09-05 11:37:26 -07:00
1bc6f64eb5 docs: update event page (#25799)
docs: change reactiveconf event location
PR Close #25799
2018-09-05 11:37:02 -07:00
397b047eff docs: fix showcase address truly-ui (#25757)
PR Close #25757
2018-09-05 11:36:39 -07:00
d96e962da3 test(ivy): mock the filesystem in ngcc main() integration tests (#25557)
Bazel does not like the filesystem being modified.
This commit adds a temporary mock filesystem that can be modified as needed.

PR Close #25557
2018-09-05 11:35:47 -07:00
b0cb134815 feat(ivy): implement ngcc build marker (#25557)
`ngcc` adds marker files to each folder that has been
compiled, containing the version of ngcc that was used.

When compiling, it will ignore folders that contain these
marker files, as long as the version matches.
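
A minimal sketch of the marker idea (file name and helper names here are illustrative, not the actual ngcc implementation):

```ts
import {existsSync, readFileSync, writeFileSync} from 'fs';
import {join} from 'path';

// Illustrative marker handling; the real ngcc file names and APIs may differ.
const NGCC_VERSION = '0.0.0-PLACEHOLDER';
const MARKER_FILE = '__ngcc_build_marker__.json';

export function alreadyCompiled(packageDir: string): boolean {
  const markerPath = join(packageDir, MARKER_FILE);
  if (!existsSync(markerPath)) {
    return false;
  }
  const marker = JSON.parse(readFileSync(markerPath, 'utf8'));
  // Only skip the folder if it was compiled by the same ngcc version.
  return marker.version === NGCC_VERSION;
}

export function writeMarker(packageDir: string): void {
  writeFileSync(join(packageDir, MARKER_FILE), JSON.stringify({version: NGCC_VERSION}));
}
```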

PR Close #25557
2018-09-05 11:35:47 -07:00
2a672a97ab fix(upgrade): trigger $destroy event on upgraded component element (#25357)
Fixes #25334

PR Close #25357
2018-09-05 11:35:14 -07:00
71007ef9b2 refactor(upgrade): share code for destroying upgraded components between dynamic and static (#25357)
PR Close #25357
2018-09-05 11:35:14 -07:00
51c26b8afb test: remove deprecated Buffer usage in sourcemap test (#25805)
PR Close #25805
2018-09-05 09:38:48 -07:00
1e7a873cf4 build(bazel): remove unused angular.tsconfig.json integration/bazel test (#25778)
PR Close #25778
2018-09-05 09:38:19 -07:00
13b8399d0c test(docs-infra): fix double-slash in URL of aio_monitoring test (#25641)
As part of the tests run in the CircleCI `aio_monitoring` job, we need
to retrieve `sitemap.xml` from the site under test. Previously, the URL
used to retrieve it contained a double slash (`//`). At some point,
Firebase (which is used for hosting the site) stopped normalizing
double-slashes to a single slash, causing the test to fail.

This commit fixes the problem by ensuring that the constructed URLs do
not contain double-slashes.
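
A sketch of the kind of URL construction that avoids the double slash (host and path are illustrative):

```ts
// Join a base URL and a path without producing `//` between them.
function urlJoin(base: string, path: string): string {
  return `${base.replace(/\/+$/, '')}/${path.replace(/^\/+/, '')}`;
}

// urlJoin('https://example.com/', '/sitemap.xml') === 'https://example.com/sitemap.xml'
```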

PR Close #25641
2018-09-05 09:37:55 -07:00
010e35d995 feat(router): warn if navigation triggered outside Angular zone (#24959)
closes #15770, closes #15946, closes #24728

PR Close #24959
2018-09-05 09:35:14 -07:00
234661b3d6 feat(docs-infra): disable "status" selector in API list when displaying only packages (#25718)
Closes #25708

PR Close #25718
2018-09-05 09:28:28 -07:00
4a04ab8823 build(docs-infra): do not render internals in package API pages (#25723)
Closes #24493

PR Close #25723
2018-09-05 09:28:05 -07:00
a417b2b419 fix(ivy): detect frozen flyweight objects in definitions and unfreeze (#25755)
defineComponent() and friends can return a flyweight EMPTY object for
specific fields when they contain no data. InheritDefinitionFeature
was attempting to write into these flyweight objects, which have been
protected with Object.freeze().

This commit adds detection to InheritDefinitionFeature to identify the
frozen objects.
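
A rough sketch of the kind of check involved (not the actual InheritDefinitionFeature code):

```ts
// If a definition field is the shared, frozen EMPTY flyweight, clone it
// so the inheriting definition gets a writable copy before merging values.
function writableCopy<T extends object>(value: T): T {
  return Object.isFrozen(value) ? {...value} : value;
}
```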

PR Close #25755
2018-09-05 09:27:41 -07:00
2c66523222 docs(changelog): fix version 6.1.5 typo (#25760)
PR Close #25760
2018-09-05 09:27:15 -07:00
25c1f331d6 refactor(docs-infra): bump polyfills payload limit (#25806)
PR Close #25806
2018-09-05 09:23:10 -07:00
59aab14394 refactor(docs-infra): simplify custom-element polyfill setup (#25806)
PR Close #25806
2018-09-05 09:23:10 -07:00
c1ae3c16e8 build(docs-infra): ensure root node_modules exists (#25811)
Now that the doc-gen parses the imports of TS source
files, we need to ensure that the root node_modules
exists. Otherwise, running `yarn docs` produces an
obscure error:

```
Error: No SourceFile found with path node_modules/@types/jasmine/index.d.ts
```

Closes #25759

PR Close #25811
2018-09-05 09:22:46 -07:00
51c0d9cae9 style(ivy): remove unused ivy code (#25780)
PR Close #25780
2018-09-04 12:12:04 -07:00
08dfbc5475 fix(ivy): reexport __POST_NGCC__ symbols as private to prevent DCE in FESM (#25780)
While creating FESM files, rollup usually drops all unused symbols.
All *__POST_NGCC__ symbols are unused unless ngcc rewires them. To prevent this DCE
we reexport them as private symbols. If ngcc is not used, these symbols will
be dropped when we optimize an application bundle.

We don't have an infrastructure to test this fix, so I just manually inspected
the bundles before and after to verify that the fix works.

PR Close #25780
2018-09-04 12:12:04 -07:00
2f1bc1aa1a docs: add pwa keyword to service worker page (#25725)
PR Close #25725
2018-09-04 12:09:54 -07:00
cc29b9cf93 fix(ivy): use globally unique names for i18n constants (#25689)
Closure compiler requires that the i18n message constants of the form

const MSG_XYZ = goog.getMsg('...');

have names that are unique across an entire compilation, even if the
variables themselves are local to a given module. This means that in
practice these names must be unique in a codebase.

The best way to guarantee this requirement is met is to encode the
relative file name of the file into which the constant is being written
into the constant name itself. This commit implements that solution.
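
For example (file and message names are illustrative), a message in `app/foo.ts` might be emitted with the file path encoded into the constant name:

```ts
// Stand-in declaration for the Closure runtime helper.
declare const goog: {getMsg(msg: string): string};

// Unique across the compilation because the relative file name is part of the name.
const MSG_APP_FOO_TS_0 = goog.getMsg('Welcome to the app');
```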

PR Close #25689
2018-09-04 12:09:29 -07:00
bd0eb0d1d4 docs: correct misspellings and add missing punctuation in tutorial (#25676)
PR Close #25676
2018-09-04 12:08:52 -07:00
e84da1981d ci: ensure build-packages-dist works on OS/X bash (#25591)
PR Close #25591
2018-09-04 12:08:24 -07:00
abd29f5049 docs(forms): update API reference for reactive and template-driven forms modules (#25687)
PR Close #25687
2018-08-31 13:37:40 -07:00
6def18a95e fix(ivy): support directive outputs on ng-template (#25717)
Compiler part of #25698
Fixes #25697

PR Close #25717
2018-08-31 13:37:16 -07:00
34be51898d fix(ivy): support host bindings on dynamically created components (#25765)
PR Close #25765
2018-08-31 13:36:53 -07:00
1e3460be0b refactor(ivy): remove obsolete types (#25767)
In the past factories could return an array with content queries
but we no longer manage queries in factory functions.

PR Close #25767
2018-08-31 13:36:22 -07:00
31349fde90 build(bazel): make resolveTypeReferenceDirectives override work with both ts 2.9 & ts 3.0 (#25581)
PR Close #25581
2018-08-31 11:12:03 -07:00
910381ddbd build(bazel): fix bazel types reference directive resolves (#25581)
PR Close #25581
2018-08-31 11:12:03 -07:00
20b9c61d4c perf(ivy): speed up ngcc ivy switch processing (#25534)
Only parse the AST for ngcc ivy switch constants
if the marker is not found in the module text.

PR Close #25534
2018-08-31 09:47:50 -07:00
e964319fe9 test(ivy): test Esm5Renderer.getSwitchableDeclarations (#25534)
Also incorporates a refactoring of the tests to make them less fragile.

PR Close #25534
2018-08-31 09:47:50 -07:00
26cd9f5433 feat(ivy): implement Renderer.getSwitchableDeclarations (#25534)
This supports the "ngcc ivy switch" specified in #25238.

PR Close #25534
2018-08-31 09:47:50 -07:00
e73e864f87 test(ivy): refactor Esm5Renderer tests to make them less fragile (#25534)
PR Close #25534
2018-08-31 09:47:50 -07:00
73047483a1 test(ivy): refactor Esm2015Renderer tests to make them less fragile (#25534)
PR Close #25534
2018-08-31 09:47:50 -07:00
6f168b7a0f feat(ivy): implement NgccReflectionHost.getSwitchableDeclarations() (#25534)
This method will be used to find all the places where the "ivy switch"
will occur. See #25238

PR Close #25534
2018-08-31 09:47:49 -07:00
a469c2c412 feat(ivy): produce contextual diagnostics in ngtsc mode (#25647)
TypeScript has a more modern diagnostic emit function which produces
contextually annotated error information, using colors in the console
to indicate where in the code the error occurs.

This commit switches ngtsc to use this format for diagnostics when
emitting them after a failed compilation.

PR Close #25647
2018-08-31 09:43:31 -07:00
38f624d7e3 feat(ivy): output diagnostics for many errors in ngtsc (#25647)
This commit takes the first steps towards ngtsc producing real
TypeScript diagnostics instead of simply throwing errors when
encountering incorrect code.

A new class is introduced, FatalDiagnosticError, which can be thrown by
handlers whenever a condition in the code is encountered which by
necessity prevents the class from being compiled. This error type is
convertible to a ts.Diagnostic which represents the type and source of
the error.

Error codes are introduced for Angular errors, and are prefixed with -99
(so error code 1001 becomes -991001) to distinguish them from other TS
errors.

A function is provided which will read TS diagnostic output and convert
the TS errors to NG errors if they match this negative error code
format.
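
A sketch of the encoding described above (helper names assumed):

```ts
// 1001 -> -991001: the `-99` prefix marks the diagnostic as an Angular error.
function ngErrorCode(code: number): number {
  return parseInt('-99' + code, 10);
}

// A TS diagnostic code in this range can be mapped back to the Angular code.
function isAngularDiagnosticCode(tsCode: number): boolean {
  return tsCode < 0 && String(tsCode).startsWith('-99');
}
```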

PR Close #25647
2018-08-31 09:43:30 -07:00
b424b3187e fix(compiler): add hostVars and support pure functions in host bindings (#25626)
PR Close #25626
2018-08-31 09:42:58 -07:00
00f13110be feat(ivy): support injecting Renderer2 (#25523)
PR Close #25523
2018-08-31 09:42:36 -07:00
ccb4a396f0 test(docs-infra): test that the "suggest edit" buttons are visible where expected (#24378)
PR Close #24378
2018-08-31 09:42:10 -07:00
d539122466 refactor(docs-infra): refactor templates (#24378)
PR Close #24378
2018-08-31 09:42:10 -07:00
b89a7dd4a2 fix(docs-infra): show "suggest edits" only for /guide and /tutorial dirs (#24378)
PR Close #24378
2018-08-31 09:42:10 -07:00
9533cc9809 feat(docs-infra): add "suggest edits" feature to all docs (#24378)
PR Close #24378
2018-08-31 09:42:10 -07:00
06d04002fd fix(benchpress): Use performance.mark() instead of console.time() (#24114)
Previously, benchpress would use `console.time()` and
`console.timeEnd()` to measure the start and end of a test in the
performance log. This used to work over navigations - if you called
`console.time(id)` then navigated to a different page, calling
`console.timeEnd(id)` would still insert an event in the performance
log.

As of Chrome 65, this is no longer the case. `console.timeEnd(id)` will
simply not insert an event in the performance log unless
`console.time(id)` was called on the same page. Likewise, using
`performance.measure()` does not work if the starting mark was on a
different page.

This simple workaround uses `performance.mark()` to insert events in the
performance log at the start and end of the test. Benchpress looks for
'-bpstart' and '-bpend' in the name of the performance mark, and
normalizes that to the start and end events expected by PerflogMetric

PR Close #24114
2018-08-30 21:33:40 -07:00
6143da66b2 fix(elements): add compiler dependency (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
a080ffc743 fix(elements): add compiler to integration (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
a8210d010b fix(elements): strict null checks (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
c9844a2f01 feat(elements): enable Shadow DOM v1 and slots (#24861)
When using ViewEncapsulation.ShadowDom, Angular will not remove the child nodes of the DOM node a root Component is bootstrapped into. This enables developers building Angular Elements to use the `<slot>` element to do native content projection.
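
A minimal example of the behavior described above (component and selector names are illustrative):

```ts
import {Component, ViewEncapsulation} from '@angular/core';

@Component({
  selector: 'my-popup',
  encapsulation: ViewEncapsulation.ShadowDom,
  // With ShadowDom encapsulation the original child nodes are preserved,
  // so a native <slot> can project them.
  template: `
    <h2>Popup</h2>
    <slot></slot>
  `,
})
export class MyPopupComponent {}

// Usage as a custom element: <my-popup><p>This content is slotted.</p></my-popup>
```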

PR Close #24861
2018-08-30 21:33:14 -07:00
4815b92495 test(elements): add basic integration test for angular elements (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
d76a7d6f7c test(core): update Web Platform feature detection (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
6e6489a408 ci: update firefox version to 61 (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
1d8e821276 ci: update chrome version to 67 (#24861)
PR Close #24861
2018-08-30 21:33:14 -07:00
6e828bba88 fix(core): do not clear element content when using shadow dom (#24861)
PR Close #24861
2018-08-30 21:33:13 -07:00
1f59f2f04d fix(core): size regression with closure compiler (#25531)
By pulling `compiler` into `core`, the `compiler` was not
100% tree-shakable and about 8 KB of code was retained
when tree-shaken with Closure.

PR Close #25531
2018-08-30 21:22:40 -07:00
371df35624 fix(ivy): register to directive outputs on ng-template / ng-container (#25698)
Runtime part of #25697

PR Close #25698
2018-08-30 21:22:01 -07:00
3809e0fcae fix(bazel): protractor rule should include *.e2e-spec.js (#25701)
PR Close #25701
2018-08-30 21:21:27 -07:00
b06f1c0087 refactor(ivy): remove duplicate global (#25756)
PR Close #25756
2018-08-30 21:20:15 -07:00
2379ad1a4b docs: edit and organize di guide (#21915)
PR Close #21915
2018-08-30 13:15:47 -04:00
dd2a650c34 release: cut the v7.0.0-beta.4 release 2018-08-29 16:20:28 -07:00
72b3b27348 docs: release notes for the v6.1.6 release 2018-08-29 16:15:08 -07:00
f394ba0e27 fix(bazel): Cache fileNameToModuleName lookups (#25731)
This saves expensive re-parsing of the file when not run as a Bazel worker

PR Close #25731
2018-08-29 17:39:22 -04:00
268cf7989c docs: release notes for the v6.1.5 release 2018-08-28 21:51:00 -07:00
9ee29ecdd4 docs: clarification of hero selection in routing section (#25634)
PR Close #25634
2018-08-28 22:16:18 -04:00
42072c4d0d fix(bazel): only lookup amd module-name tags in .d.ts files (#25710)
PR Close #25710
2018-08-28 21:07:15 -04:00
29761ea5f8 refactor(compiler-cli): remove tsickle from dependencies (#25649)
Users can still install tsickle if they want closure-compatible output.

PR Close #25649
2018-08-28 16:44:43 -04:00
a22fb91e1a refactor(compiler-cli): remove a test that we no longer care about (#25649)
Alex R. says that we no longer care about the behavior this is testing.

PR Close #25649
2018-08-28 16:44:43 -04:00
0386c44acc fix(ivy): inject attributes for directives on ng-template / ng-container (#25697)
PR Close #25697
2018-08-28 14:34:59 -04:00
0fe708ff82 ci: remove vicb from pullapprove.yml (#25702)
PR Close #25702
2018-08-28 11:15:53 -07:00
b069514818 fix(docs-infra): temporary SW fix (for online mode) (#25692)
Due to unknown reasons, Firebase seems to return a 301 response for
`/index.html`, but without a `Location` header. According to the spec,
the browser will not follow the redirect and instead considers the
response as failed.

This commit temporarily removes `index.html` from hashed resources and
changes `index` to `/` in `ngsw-config.json`, until we figure out an
appropriate long-term solution.

PR Close #25692
2018-08-28 13:33:48 -04:00
0024d68add feat(ivy): add support for resolving view data from a DOM node (#25627)
PR Close #25627
2018-08-27 21:15:29 -04:00
317d40d879 test(compiler-cli): improve testing harness for incremental compilation (#25275)
In tsc 3.0 the check that enables program structure reuse in tryReuseStructureFromOldProgram has changed
and now uses identity comparison on arrays within CompilerOptions. Since we recreate the options
on each incremental compilation, we now fail this check.

After this change the default set of options is reused between incremental compilations, but we still
allow options to be overridden if needed.
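
A sketch of the approach, assuming a plain `ts.createProgram`-based harness (names illustrative):

```ts
import * as ts from 'typescript';

// Keep a single options object alive between compilations so that tsc's
// identity checks on arrays inside CompilerOptions still pass.
const defaultOptions: ts.CompilerOptions = {types: [], rootDirs: []};

function compileIncrementally(
    rootNames: string[], oldProgram?: ts.Program,
    overrides?: ts.CompilerOptions): ts.Program {
  // Only build a fresh object when a test actually overrides something.
  const options = overrides ? {...defaultOptions, ...overrides} : defaultOptions;
  const host = ts.createCompilerHost(options);
  return ts.createProgram(rootNames, options, host, oldProgram);
}
```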

PR Close #25275
2018-08-27 21:07:53 -04:00
ab32ac6bb7 test(compiler-cli): fix the incremental ngc tests so that they run under bazel (#25275)
PR Close #25275
2018-08-27 21:07:53 -04:00
5653fada32 feat: add TypeScript 3 support (#25275)
PR Close #25275
2018-08-27 21:07:53 -04:00
c230173716 build: update Bazel to 0.16 (#25646)
PR Close #25646
2018-08-27 18:20:32 -04:00
366195e182 build(bazel): esm5_outputs_aspect to work with targets such as ts_proto_library with no replay_params attribute (#25605)
PR Close #25605
2018-08-27 18:19:44 -04:00
b1902db0cb build(docs-infra): render all overloads if they are abstract (#25670)
In an overloaded method, the overload with the function body is the
actual method doc, and this doc is not included in the list of "additional"
overloads.

Moreover, the logic (all in dgeni-packages) is that if none of the items
has a body then we use the first overload as the actual method doc.

In the case of abstract methods, none of the methods have a body. So we
have a situation where the overloads collection does not contain the first
abstract method, even though it is not the "implementation" of the method.
Therefore we still need to render it.

Closes #25610

PR Close #25670
2018-08-27 18:19:08 -04:00
a081d207f8 build(docs-infra): remove "annotations" section from API pages (#25677)
PR Close #25677
2018-08-27 18:18:41 -04:00
6ed79934c4 fix(bazel): move bazel managed runtime deps for downstream usage (#25690)
PR Close #25690
2018-08-27 18:18:17 -04:00
6a0f78fabf fix(ivy): match directives on bindings and element outputs (#25614)
Closes #23560

PR Close #25614
2018-08-27 18:17:25 -04:00
3634575d89 test(common): fix double instantiation in NgSwitch test (#25632)
One of the tests was creating a TestComponent instance _and_ using
global state, making the test unpredictable. Fix the test by making
sure that TestComponent is instantiated only once.

PR Close #25632
2018-08-27 16:40:56 -04:00
f596930c8c fix(docs-infra): ignore server-side redirected URLs in the SW (#19795)
This allows URLs to be passed through to the server (where they are
properly redirected), instead of serving `index.html` from the SW.

Known issue:
`/docs/` will be passed through to the server. `/docs` (without the
trailing slash) will be correctly treated as a navigation URL and
handled by the SW.
We don't link to `/docs/` from within the app, but if there are external
links to `/docs/` they will require a round-trip to the server and will
not work in offline mode.

PR Close #19795
2018-08-27 16:30:43 -04:00
b3c56f021f build(docs-infra): downgrade @angular-devkit/build-angular to 0.6.7 (#19795)
There is a bug in versions 0.6.8+ that breaks when trying to use Jasmine's
mock clock.

Related to angular/angular-cli#11626.

PR Close #19795
2018-08-27 16:30:43 -04:00
864c22fb79 build(docs-infra): update size limits (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
4d83a0a789 build(docs-infra): upgrade @angular/* to 6.1.0-rc.3 and rxjs to 6.2.2 (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
2569fd4d75 build(docs-infra): upgrade @angular/cli to 6.1.0-rc.3 and @angular-devkit/build-angular to 0.7.0-rc.3 (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
c73ebe79b3 build(docs-infra): update size limits (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
110ab2230b build(docs-infra): upgrade @angular/material to 6.0.2 (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
4a61cb3fab build(docs-infra): upgrade @angular/* to 6.0.2 and rxjs to 6.1.0 (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
4dc5afb5c6 build(docs-infra): upgrade @angular/cli to 6.0.3 (#19795)
PR Close #19795
2018-08-27 16:30:43 -04:00
8c2c6b5d1a build(docs-infra): remove unused dependencies (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
f71cce7f9b build(docs-infra): remove the dependency on rxjs-compat (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
53c1efb50a build(docs-infra): enable ServiceWorker in cli config (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
b6ccd9f7bd fix(docs-infra): correctly show icon for fetch error when offline (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
8b614d4e1b refactor(docs-infra): move concept icons to more appropriate location (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
7f905da335 refactor(docs-infra): remove unused images (#19795)
PR Close #19795
2018-08-27 16:30:42 -04:00
be24f9f0cb feat(docs-infra): Convert AIO to use the new Service Worker 5.0.0. (#19795)
AIO is currently using a beta version of @angular/service-worker.
Since that was implemented, the SW has been rewritten and released
as part of Angular 5.0.0. This commit updates AIO to use the latest
implementation, with an appropriate configuration file that caches
the various AIO assets in useful ways.

PR Close #19795
2018-08-27 16:30:42 -04:00
66ffa360df test(docs-infra): correctly extract sitemap URLs (#19795)
`%%DEPLOYMENT_HOST%%` has been assumed to be the host prefix for sitemap
URLs since bf29936af, but afaict this was never the case.

PR Close #19795
2018-08-27 16:30:42 -04:00
9bcd8c2425 build(bazel): use value of /// <amd-module name=“”> directive to convert fileNameToModuleName in ngc-wrapped (#25650)
PR Close #25650
2018-08-27 12:21:18 -04:00
8fa099158e fix(ivy): allow queries for ng-container without read option (#25617)
PR Close #25617
2018-08-24 11:52:50 -04:00
b00038c847 fix(ivy): inject ViewContainerRef for directives on ng-container (#25617)
PR Close #25617
2018-08-24 11:52:50 -04:00
18f129f536 build: upgrade Chromium and ChromeDriver to latest versions (#25602)
PR Close #25602
2018-08-24 11:48:40 -04:00
efb453cb73 build: check for latest Chromium version (#25602)
Now that https://omahaproxy.appspot.com/all is back up, we can restore
the check for newer available version of Chromium.

Fixes #22231

PR Close #25602
2018-08-24 11:48:40 -04:00
658f49f650 docs(elements): fix typo (tranformation --> transformation) (#25600)
PR Close #25600
2018-08-23 16:54:49 -04:00
a37bcc3bfe feat(ivy): bridge component styles into the component renderer (#25255)
PR Close #25255
2018-08-23 16:51:15 -04:00
a59d4da304 build(bazel): fix bazel integration test after rules_typescript update (#25490)
PR Close #25490
2018-08-23 15:26:21 -04:00
22e7f7e99f build(bazel): update to rules_typescript 0.16.1 (#25490)
PR Close #25490
2018-08-23 15:26:21 -04:00
3d41739021 fix(aio): show aio-themed 404 page for unknown resources (#23188)
Fixes #23179

PR Close #23188
2018-08-23 15:25:47 -04:00
668bfcec9d fix(docs-infra): fix closure warning issue for improper internal flag (#25628)
PR Close #25628
2018-08-22 21:59:22 -04:00
eb1fe19088 fix(ivy): ngtsc directive compilation should use shared ConstantPool (#25620)
This fixes a bug in ngtsc where each @Directive was compiled using a
separate ConstantPool. This resulted in two issues:

* Directive constants were not shared across the file
* Extra statements from directive compilation were dropped instead of
added to the file

This commit fixes both issues and adds a test to verify @Directive is
working properly.

PR Close #25620
2018-08-22 21:14:54 -04:00
22d58fc89b build(bazel): remove workaround no longer needed for module names for ngfactory & ngsummary files (#25604)
Workaround was added in https://github.com/angular/angular/pull/25335. It was necessary for .ngfactory & .ngsummary files to have proper AMD module names starting with @angular when building angular downstream from source using Bazel. The underlying issue has been resolved in the compiler and these files now get proper AMD module names without the need for this workaround. The workaround had an unexpected consequence https://github.com/angular/angular-cli/issues/11835 which is fixed by its removal.

PR Close #25604
2018-08-22 21:11:18 -04:00
27e2039630 fix(compiler): update compiler to generate new slot allocations (#25607)
PR Close #25607
2018-08-22 21:08:39 -04:00
5c95b4b3a3 feat(ivy): support enum values in static resolution (#25619)
This commit adds support for enumeration values. An enumeration value
is now a first-class return value of the resolver, which provides both
a Reference to the enum type itself and the name of the value from that
enum. Resolving an enum itself returns a Map<string, EnumValue>.
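
A sketch of the shape described above (type names assumed, not the actual ngtsc declarations):

```ts
// Stand-in for ngtsc's Reference to the enum declaration.
type EnumReference = unknown;

class EnumValue {
  constructor(readonly enumRef: EnumReference, readonly name: string) {}
}

// Resolving the enum itself yields a map from member name to EnumValue,
// e.g. Map { 'Emulated' -> EnumValue(ref, 'Emulated'), ... }
type ResolvedEnum = Map<string, EnumValue>;
```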

PR Close #25619
2018-08-22 19:30:23 -04:00
61218f5f0b fix(ivy): ensure factory statements are emitted correctly (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
7500f0eafb feat(ivy): find all packages to be compiled by ngcc (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
68acc5b355 feat(ivy): compile all package formats in ngcc (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
7d3b70c2af fix(ivy): ngcc is resilient to bad source-map comments (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
7ce291c72a feat(ivy): implement Esm5ReflectionHost.getGenericArityOfClass() (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
6ae1e63c89 refactor(ivy): rename Esm2015ReflectionHost to Fesm2015ReflectionHost (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
c8baace554 test(ivy): add an integration test for ngcc (#25406)
This commit adds an integration test for ngcc, which runs ngcc
against most @angular packages. It does not yet make any assertions
on the result.

PR Close #25406
2018-08-22 19:28:56 -04:00
d33e0091df fix(ivy): emit generic types when needed in defs in .d.ts file (#25406)
Ivy definitions in .d.ts files often reference the type of a class.
Sometimes, those classes have generic type parameters. When this is
the case, ngtsc needs to emit generic type parameters in the .d.ts
files (usually by passing 'any').
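
An illustrative `.d.ts` shape (type names simplified): the generic parameter of the class is filled in with `any` in the definition type.

```ts
// Stand-in for the ivy definition type referenced from the .d.ts file.
declare interface DirectiveDefinition<T> {}

export declare class MyDir<T> {
  value: T;
  // The class is generic, so the emitted definition passes `any` for T.
  static ngDirectiveDef: DirectiveDefinition<MyDir<any>>;
}
```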

PR Close #25406
2018-08-22 19:28:56 -04:00
b97d770e60 feat(ivy): add support for typings in ngcc (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
a3158bff27 refactor(ivy): minor re-organization of ngcc PackageTransformer#transform (#25406)
In preparation of adding support for transforming `.d.ts` files.

PR Close #25406
2018-08-22 19:28:56 -04:00
d6e91ba545 feat(ivy): support getting the corresponding .d.ts file in ngcc (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
cdb0215d0b test(ivy): clean up ngcc spec (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
396766104b refactor(ivy): rename spec files to match corresponding source files (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
e77f0fd6e6 refactor(ivy): export ngcc import prefix into constant (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
cdd4c9be63 feat(ivy): support custom prefix for imports in DtsFileTransformer (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
29705dd8f2 refactor(ivy): make ReflectionHost a parameter of Renderer (#25406)
PR Close #25406
2018-08-22 19:28:56 -04:00
ea68ba048a refactor(ivy): minor refactorings (#25406)
PR Close #25406
2018-08-22 19:28:55 -04:00
9081efa961 feat(ivy): enable processing of esm5 format in ngcc (#25406)
PR Close #25406
2018-08-22 19:28:55 -04:00
9e179cb311 fix(ivy): correctly detect classes in ngcc Esm5ReflectionHost (#25406)
PR Close #25406
2018-08-22 19:28:55 -04:00
3211432d2a feat(ivy): add support for esm2015 and esm5 in ngcc PackageParser (#25406)
Since non-flat module formats (esm2015, esm5) have a different structure
than their flat counterparts, and since we are operating on JS files
inside `node_modules/`, we need to configure TS to include deeply nested
JS files (by specifying a sufficiently high `maxNodeModuleJsDepth`).

It remains to be determined whether this has any (noticeable) performance
implications.
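
A sketch of the relevant compiler options (values assumed):

```ts
import * as ts from 'typescript';

// Let TS follow JS files arbitrarily deep inside node_modules/ so that
// non-flat esm2015/esm5 entry points can be loaded and parsed.
const options: ts.CompilerOptions = {
  allowJs: true,
  maxNodeModuleJsDepth: Infinity,
};
```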

PR Close #25406
2018-08-22 19:28:55 -04:00
a7134dbc37 refactor(ivy): remove unused arg from ngcc Analyzer (#25406)
PR Close #25406
2018-08-22 19:28:55 -04:00
a528636f56 fix(ivy): allow FunctionExpression to indicate a method declaration (#25406)
In some code formats (e.g. ES5) methods can actually be function
expressions. For example:

```js
function MyClass() {}
// this static method is declared as a function expression
MyClass.staticMethod = function() { ... };
```

PR Close #25406
2018-08-22 19:28:55 -04:00
f87b499dde feat(ivy): implement getDefinitionOfFunction on ES2015 and ES5 reflection hosts (#25406)
PR Close #25406
2018-08-22 19:28:55 -04:00
a45f2bfb8f feat(ivy): use the ReflectionHost to resolve parameters and initializers (#25406)
ngtsc's static resolver can evaluate function calls where parameters
have default values. In TypeScript code these default values live on the
function definition, but in ES5 code the default values are represented
by statements in the function body.

A new ReflectionHost method getDefinitionOfFunction() abstracts over
this difference, and allows the static reflector to more accurately
evaluate ES5 code.
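
For example, a default value that sits on the parameter in TypeScript ends up as a statement in the ES5 function body:

```ts
// TypeScript source: the default value is part of the signature.
function create(flags = 0) {
  return flags;
}

// Downlevelled ES5 output (what the ES5 reflection host has to recognize):
//
//   function create(flags) {
//     if (flags === void 0) { flags = 0; }
//     return flags;
//   }
```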

PR Close #25406
2018-08-22 19:28:55 -04:00
94332affd3 release: cut the v7.0.0-beta.3 release 2018-08-22 15:49:11 -07:00
52605aa2d8 docs: release notes for the v6.1.4 release 2018-08-22 15:43:55 -07:00
4e36f0cd68 build: remove NGBUILDS_IO_KEY now that it is not used any more (#25601)
This is a follow-up to #25536.

PR Close #25601
2018-08-22 15:59:14 -04:00
d5b70e0c66 fix(ivy): create LViewData from blueprint (#25587)
PR Close #25587
2018-08-22 15:58:42 -04:00
f33dbf42fd docs(changelog): add v6.1.3 release to main CHANGELOG.md (#25603)
Fixes #25565 #25513

PR Close #25603
2018-08-21 14:01:59 -07:00
6176974832 test(compiler-cli): fix failing test after bad merge (#25599)
PR Close #25599
2018-08-21 11:30:42 -07:00
69b57b2dca docs: Improve docs for downgrading a service (#19371)
PR Close #19371
2018-08-21 10:49:00 -07:00
831e71ea3c fix(ivy): host bindings should support array/object literals (#25583)
PR Close #25583
2018-08-21 10:48:42 -07:00
fc89479044 fix(router): default scroll position restoration to disabled (#25586)
Fixes #25145
FW-305 #resolve

PR Close #25586
2018-08-21 10:48:14 -07:00
f54f3856cb feat(ivy): add query inheritance (#25556)
Adds inheritance handling for the following:

- viewQuery
- contentQueries
- contentQueriesRefresh

PR Close #25556
2018-08-20 16:36:22 -07:00
c8b70ae8e4 build(bazel): Run build-packages-dist on RBE (#25237)
PR Close #25237
2018-08-20 16:34:45 -07:00
9ed3bd6b4f refactor(bazel): allow and ignore extra args for _ts_expected_outs (#25558)
This is needed to let ts_compile_actions take an explicit list of srcs and deps to generate tsc actions from another rule. This is a no-op for ngc for now.

PR Close #25558
2018-08-20 16:25:19 -07:00
11e2d9da1a feat(ivy): add support to template local refs in the compiler (#25576)
Fixes #23316

PR Close #25576
2018-08-20 16:24:56 -07:00
b05d4a5007 docs: fix typo in service worker getting started guide (#25512)
PR Close #25512
2018-08-20 11:09:52 -07:00
469d5e0448 docs: fix typo in reactive forms guide (#25543)
PR Close #25543
2018-08-20 11:08:32 -07:00
21a14407f6 refactor(ivy): generate vars in component defs (#25562)
PR Close #25562
2018-08-20 11:08:10 -07:00
d2be3d5775 docs: copy edit glossary (#25468)
PR Close #25468
2018-08-17 14:33:51 -07:00
f2aa9c6a7f refactor(ivy): use generated consts value to set binding index (#25533)
PR Close #25533
2018-08-17 14:32:55 -07:00
4708cb91ef refactor(ivy): remove reserveSlots instruction (#25533)
PR Close #25533
2018-08-17 14:32:55 -07:00
21d22ce4ad ci: ensure aio_preview job has needed node_modules (#25536)
PR Close #25536
2018-08-17 13:48:26 -07:00
e9026a5201 docs: fix script path reference (#25552)
PR Close #25552
2018-08-17 10:26:10 -07:00
f053a3f274 test(bazel): Remove --no-sandbox for protractor tests (#25362)
Since we no longer need this flag to run web tests inside a docker
container, this reverts https://github.com/angular/angular/pull/24906

PR Close #25362
2018-08-17 09:58:30 -07:00
31f0f5b3c3 feat(ivy): add support for local refs on ng-template (#25482)
PR Close #25482
2018-08-17 09:58:07 -07:00
07d8d3994c feat(router): add UrlSegment[] to CanLoad interface (#13127)
CanLoad now defines UrlSegment[] as a second parameter of the function.
Users can store the initial url segments and refer to them later, e.g. to go
back to the original url after authentication via router.navigate(urlSegments).
Existing code still works as before because the second function parameter
does not have to be defined.
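
A minimal guard using the new signature, along the lines of the use case described above:

```ts
import {Injectable} from '@angular/core';
import {CanLoad, Route, Router, UrlSegment} from '@angular/router';

@Injectable({providedIn: 'root'})
export class AuthGuard implements CanLoad {
  constructor(private router: Router) {}

  canLoad(route: Route, segments: UrlSegment[]): boolean {
    // Remember the originally requested URL so we can return to it after login.
    const attemptedUrl = '/' + segments.map(s => s.path).join('/');
    sessionStorage.setItem('redirectUrl', attemptedUrl);
    this.router.navigate(['/login']);
    return false;
  }
}
```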

Closes #12411

PR Close #13127
2018-08-16 16:32:18 -07:00
116946fb11 style: tslint enforces no debugger statements left behind (#25532)
PR Close #25532
2018-08-16 16:00:22 -07:00
503905c807 feat(ivy): add ngcc ivy switch (#25238)
Provides a runtime and compile time switch for ivy including
`ApplicationRef.bootstrapModule`.

This is done by naming the symbols such that `ngcc` (the Angular
compatibility compiler) can rename them in such a way that running
the `ngcc` command will switch the `@angular/core` module from `legacy`
to `ivy` mode.

This is done as follows:

```
const someToken__PRE_NGCC__ = 'legacy mode';
const someToken__POST_NGCC__ = 'ivy mode';

export const someSymbol = someToken__PRE_NGCC__;
```

`ngcc` will search for any token which ends with `__PRE_NGCC__`
and replace it with the corresponding `__POST_NGCC__` token. This allows
the `@angular/core` package to be rewritten to ivy mode after `ngcc` has run.

PR Close #25238
2018-08-16 13:51:42 -07:00
cc55d609ce docs: add HttpClientModule import code to services tutorial (#24854)
To be able to copy and paste.

PR Close #24854
2018-08-16 13:51:18 -07:00
de03abbd34 docs: reactive forms guide copy edits (#25417)
PR Close #25417
2018-08-16 13:50:51 -07:00
6482f6f0fe refactor(ivy): separate container into 2 instructions (#25509)
PR Close #25509
2018-08-16 13:47:14 -07:00
abcc430310 build(aio): update dgeni-packages to 0.26.3 to fix reference types issue (#25491)
PR Close #25491
2018-08-16 13:46:43 -07:00
9605456b66 build: refactor ambient node & jasmine types so they are only included where needed (#25491)
PR Close #25491
2018-08-16 13:46:43 -07:00
9ee6702fa9 refactor(ivy): remove short instruction names as they provide no value (#25493)
PR Close #25493
2018-08-16 11:04:34 -07:00
92c8752d0a docs(docs-infra): the build.sh script was renamed to create-image.sh 2018-08-16 10:26:13 +01:00
68bfe686d8 ci(docs-infra): rename 'upload-server' to 'preview-server'
The server no longer has files uploaded to it. Instead it is more
accurate to refer to it as dealing with "previews" of PRs.
2018-08-16 10:26:13 +01:00
d604ef7cf0 ci(docs-infra): add explicit return types to methods 2018-08-16 10:26:13 +01:00
36c4c8daa9 ci(docs-infra): improve preview-server logging 2018-08-16 10:26:13 +01:00
cc6f36a9d7 ci(docs-infra): change AIO preview server stuff to pull builds from CircleCI
Previously, Travis pushed the build artifacts to the preview server.
This required us to use JWT to secure the POST request from Travis, to
ensure we couldn't receive malicious builds.

JWT has been deprecated and we are moving our builds to CircleCI.

This commit rewrites the TypeScript part of the preview server that
handles converting build artifacts into hosted previews of the docs.
2018-08-16 10:26:13 +01:00
643766637e ci(docs-infra): factor out the aio-builds-setup environment variables 2018-08-16 10:26:12 +01:00
a800a5118a ci(docs-infra): move the payload-size check to the test job 2018-08-16 10:26:12 +01:00
3b8b7f4087 ci(docs-infra): add helper scripts for running TDD in Docker 2018-08-16 10:26:12 +01:00
9b820555a3 docs(docs-infra): update the preview server documentation 2018-08-16 10:26:12 +01:00
364459c576 ci(docs-infra): move AIO preview deployment to CircleCI
Now instead of pushing the AIO build artifacts to the preview server
from inside a Travis job, the artifacts are built and hosted on the
CircleCI infrastructure. The preview server will then pull these
down after being triggered by a CircleCI build webhook.
2018-08-16 10:26:11 +01:00
8347bb0d2d ci(docs-infra): update upload-server to run on node.js v10 2018-08-16 10:21:24 +01:00
524 changed files with 25881 additions and 13342 deletions

View File

@@ -20,18 +20,6 @@ build --announce_rc
 # We use this when uploading artifacts after the build finishes
 build --symlink_prefix=dist/
-# Enable experimental CircleCI bazel remote cache proxy
-# See remote cache documentation in /docs/BAZEL.md
-build --experimental_remote_spawn_cache --remote_rest_cache=http://localhost:7643
-# Prevent unstable environment variables from tainting cache keys
-build --experimental_strict_action_env
-# Save downloaded repositories such as the go toolchain
-# This directory can then be included in the CircleCI cache
-# It should save time running the first build
-build --experimental_repository_cache=/home/circleci/bazel_repository_cache
 # Workaround https://github.com/bazelbuild/bazel/issues/3645
 # Bazel doesn't calculate the memory ceiling correctly when running under Docker.
 # Limit Bazel to consuming resources that fit in CircleCI "xlarge" class

View File

@@ -12,8 +12,8 @@
 ## IMPORTANT
 # If you change the `docker_image` version, also change the `cache_key` suffix and the version of
 # `com_github_bazelbuild_buildtools` in the `/WORKSPACE` file.
-var_1: &docker_image angular/ngcontainer:0.3.3
-var_2: &cache_key v2-angular-{{ .Branch }}-{{ checksum "yarn.lock" }}-0.3.3
+var_1: &docker_image angular/ngcontainer:0.4.0
+var_2: &cache_key v2-angular-{{ .Branch }}-{{ checksum "yarn.lock" }}-bust1-0.4.0

 # Define common ENV vars
 var_3: &define_env_vars

@@ -148,6 +148,23 @@ jobs:
     - run: bazel run @yarn//:yarn
     - run: bazel query --output=label //... | xargs bazel test --define=compile=local --build_tag_filters=ivy-local --test_tag_filters=-manual,ivy-local

+  aio_preview:
+    <<: *job_defaults
+    environment:
+       AIO_SNAPSHOT_ARTIFACT_PATH: &aio_preview_artifact_path 'aio/tmp/snapshot.tgz'
+    steps:
+      - checkout:
+          <<: *post_checkout
+      - restore_cache:
+          key: *cache_key
+      - run: yarn install --frozen-lockfile --non-interactive
+      - run: ./aio/scripts/build-artifacts.sh $AIO_SNAPSHOT_ARTIFACT_PATH
+      - store_artifacts:
+          path: *aio_preview_artifact_path
+          # The `destination` needs to be kept in synch with the value of
+          # `AIO_ARTIFACT_PATH` in `aio/aio-builds-setup/Dockerfile`
+          destination: aio/dist/aio-snapshot.tgz
+
 # This job exists only for backwards-compatibility with old scripts and tests
 # that rely on the pre-Bazel dist/packages-dist layout.
 # It duplicates some work with the job above: we build the bazel packages

@@ -163,12 +180,20 @@ jobs:
      - checkout:
          <<: *post_checkout
      # See remote cache documentation in /docs/BAZEL.md
-     - run: .circleci/setup_cache.sh
      - run: sudo cp .circleci/bazel.rc /etc/bazel.bazelrc
-     - *setup-bazel-remote-cache
      - run: bazel run @nodejs//:yarn
-     - run: scripts/build-packages-dist.sh
+     - run:
+         # RBE is enabled by appending rbe-bazel.rc.
+         name: Enable RBE
+         command: 'sudo bash -c "cat .circleci/rbe-bazel.rc >> /etc/bazel.bazelrc"'
+     - run:
+         name: "Setup GCP environment"
+         command: 'openssl aes-256-cbc -d -in .circleci/gcp_token -k "${CIRCLE_PROJECT_REPONAME}" -out /home/circleci/.gcp_credentials'
+     - run:
+         name: build-packages-dist
+         command: scripts/build-packages-dist.sh
+         environment:
+           GOOGLE_APPLICATION_CREDENTIALS: /home/circleci/.gcp_credentials

 # Save the npm packages from //packages/... for other workflow jobs to read
 # https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs

@@ -245,6 +270,7 @@ workflows:
      - test_ivy_jit
      - test_ivy_aot
      - build-packages-dist
+     - aio_preview
      - integration_test:
          requires:
            - build-packages-dist

@@ -273,3 +299,6 @@ workflows:
        branches:
          only:
            - master
+  notify:
+    webhooks:
+      - url: https://ngbuilds.io/circle-build

.circleci/gcp_token (new binary file)

Binary file not shown.

.circleci/rbe-bazel.rc (new file, 77 lines)
View File

@@ -0,0 +1,77 @@
# These options are enabled when running on CI with Remote Build Execution.
################################################################
# Toolchain related flags for remote build execution. #
################################################################
# Remote Build Execution requires a strong hash function, such as SHA256.
startup --host_jvm_args=-Dbazel.DigestFunction=SHA256
# Depending on how many machines are in the remote execution instance, setting
# this higher can make builds faster by allowing more jobs to run in parallel.
# Setting it too high can result in jobs that timeout, however, while waiting
# for a remote machine to execute them.
build --jobs=150
# Set several flags related to specifying the platform, toolchain and java
# properties.
# These flags are duplicated rather than imported from (for example)
# %workspace%/configs/ubuntu16_04_clang/1.0/toolchain.bazelrc to make this
# bazelrc a standalone file that can be copied more easily.
# These flags should only be used as is for the rbe-ubuntu16-04 container
# and need to be adapted to work with other toolchain containers.
build --host_javabase=@bazel_toolchains//configs/ubuntu16_04_clang/1.0:jdk8
build --javabase=@bazel_toolchains//configs/ubuntu16_04_clang/1.0:jdk8
build --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build --crosstool_top=@bazel_toolchains//configs/ubuntu16_04_clang/1.0/bazel_0.15.0/default:toolchain
build --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
# Platform flags:
# The toolchain container used for execution is defined in the target indicated
# by "extra_execution_platforms", "host_platform" and "platforms".
# If you are using your own toolchain container, you need to create a platform
# target with "constraint_values" that allow for the toolchain specified with
# "extra_toolchains" to be selected (given constraints defined in
# "exec_compatible_with").
# More about platforms: https://docs.bazel.build/versions/master/platforms.html
build --extra_toolchains=@bazel_toolchains//configs/ubuntu16_04_clang/1.0/bazel_0.15.0/cpp:cc-toolchain-clang-x86_64-default
build --extra_execution_platforms=@bazel_toolchains//configs/ubuntu16_04_clang/1.0:rbe_ubuntu1604
build --host_platform=@bazel_toolchains//configs/ubuntu16_04_clang/1.0:rbe_ubuntu1604
build --platforms=@bazel_toolchains//configs/ubuntu16_04_clang/1.0:rbe_ubuntu1604
# Set various strategies so that all actions execute remotely. Mixing remote
# and local execution will lead to errors unless the toolchain and remote
# machine exactly match the host machine.
build --spawn_strategy=remote
build --strategy=Javac=remote
build --strategy=Closure=remote
build --genrule_strategy=remote
build --define=EXECUTOR=remote
# Enable the remote cache so action results can be shared across machines,
# developers, and workspaces.
build --remote_cache=remotebuildexecution.googleapis.com
# Enable remote execution so actions are performed on the remote systems.
build --remote_executor=remotebuildexecution.googleapis.com
# Remote instance.
build --remote_instance_name=projects/internal-200822/instances/default_instance
# Enable encryption.
build --tls_enabled=true
# Enforce stricter environment rules, which eliminates some non-hermetic
# behavior and therefore improves both the remote cache hit rate and the
# correctness and repeatability of the build.
build --experimental_strict_action_env=true
# Set a higher timeout value, just in case.
build --remote_timeout=3600
# Enable authentication. This will pick up application default credentials by
# default. You can use --auth_credentials=some_file.json to use a service
# account credential instead.
build --auth_enabled=true
# Do not accept remote cache.
build --remote_accept_cached=false

View File

@@ -24,7 +24,6 @@
 # petebacondarwin - Pete Bacon Darwin
 # pkozlowski-opensource - Pawel Kozlowski
 # robwormald - Rob Wormald
-# vicb - Victor Berchet
 # vikerman - Vikram Subramanian

@@ -125,7 +124,6 @@ groups:
     users:
      - alexeagle
      - mhevery
-     - vicb
      - IgorMinar #fallback

  core:

@@ -197,7 +195,6 @@ groups:
      - mhevery #primary
      - jasonaden
      - kara
-     - vicb
      - IgorMinar
      - jenniferfell #docs only

@@ -222,8 +219,7 @@ groups:
      - "aio/content/guide/i18n.md"
      - "aio/content/examples/i18n/*"
     users:
-     - vicb #primary
-     - alxhub
+     - alxhub #primary
      - IgorMinar #fallback
      - mhevery #fallback
      - jenniferfell #docs only

@@ -235,7 +231,6 @@ groups:
      - "aio/content/guide/aot-compiler.md"
     users:
      - alxhub #primary
-     - vicb
      - mhevery
      - IgorMinar #fallback
      - jenniferfell #docs only

@@ -260,7 +255,6 @@ groups:
     users:
      - alexeagle
      - alxhub
-     - vicb
      - IgorMinar #fallback
      - mhevery #fallback

@@ -273,7 +267,6 @@ groups:
      - "packages/common/http/*"
     users:
      - pkozlowski-opensource #primary
-     - vicb
      - IgorMinar #fallback
      - mhevery #fallback

@@ -322,7 +315,6 @@ groups:
     users:
      - kyliau #primary
      # needs secondary
-     - vicb
      - IgorMinar #fallback
      - mhevery #fallback
      - jenniferfell #docs only

@@ -336,7 +328,6 @@ groups:
      - "aio/content/images/guide/router/*"
     users:
      - jasonaden #primary
-     - vicb
      - IgorMinar #fallback
      - mhevery #fallback
      - jenniferfell #docs only

@@ -379,10 +370,9 @@ groups:
     files:
      - "packages/platform-browser/*"
     users:
-     - vicb #primary
+     - mhevery #primary
      # needs secondary
      - IgorMinar #fallback
-     - mhevery #fallback

  platform-server:
    conditions:

@@ -393,7 +383,6 @@ groups:
     users:
      - vikerman #primary
      - alxhub #secondary
-     - vicb
      - IgorMinar #fallback
      - mhevery #fallback
      - jenniferfell #docs only

@@ -403,10 +392,9 @@ groups:
     files:
      - "packages/platform-webworker/*"
     users:
-     - vicb #primary
+     - mhevery #primary
      # needs secondary
      - IgorMinar #fallback
-     - mhevery #fallback

  service-worker:
    conditions:

View File

@@ -13,11 +13,7 @@ addons:
    packages:
      # needed to install g++ that is used by npms's native modules
      - g++-4.8
-# https://docs.travis-ci.com/user/jwt
-jwt:
-  # SAUCE_ACCESS_KEY<=secret for NGBUILDS_IO_KEY to work around travis-ci/travis-ci#7223, unencrypted value in valentine as NGBUILDS_IO_KEY>
-  # we alias NGBUILDS_IO_KEY to $SAUCE_ACCESS_KEY in env.sh and set the SAUCE_ACCESS_KEY there
-  - secure: "L7nrZwkAtFtYrP2DykPXgZvEKjkv0J/TwQ/r2QGxFTaBq4VZn+2Dw0YS7uCxoMqYzDwH0aAOqxoutibVpk8Z/16nE3tNmU5RzltMd6Xmt3qU2f/JDQLMo6PSlBodnjOUsDHJgmtrcbjhqrx/znA237BkNUu6UZRT7mxhXIZpn0U="
 branches:
   except:
     - g3

View File

@@ -1,3 +1,92 @@
<a name="7.0.0-beta.5"></a>
# [7.0.0-beta.5](https://github.com/angular/angular/compare/7.0.0-beta.4...7.0.0-beta.5) (2018-09-06)
### Bug Fixes
* **bazel:** protractor rule should include *.e2e-spec.js ([#25701](https://github.com/angular/angular/issues/25701)) ([3809e0f](https://github.com/angular/angular/commit/3809e0f))
* **benchpress:** Use performance.mark() instead of console.time() ([#24114](https://github.com/angular/angular/issues/24114)) ([06d0400](https://github.com/angular/angular/commit/06d0400))
* **compiler:** add hostVars and support pure functions in host bindings ([#25626](https://github.com/angular/angular/issues/25626)) ([b424b31](https://github.com/angular/angular/commit/b424b31))
* **core:** do not clear element content when using shadow dom ([#24861](https://github.com/angular/angular/issues/24861)) ([6e828bb](https://github.com/angular/angular/commit/6e828bb))
* **core:** size regression with closure compiler ([#25531](https://github.com/angular/angular/issues/25531)) ([1f59f2f](https://github.com/angular/angular/commit/1f59f2f))
* **elements:** add compiler dependency ([#24861](https://github.com/angular/angular/issues/24861)) ([6143da6](https://github.com/angular/angular/commit/6143da6))
* **elements:** add compiler to integration ([#24861](https://github.com/angular/angular/issues/24861)) ([a080ffc](https://github.com/angular/angular/commit/a080ffc))
* **elements:** strict null checks ([#24861](https://github.com/angular/angular/issues/24861)) ([a8210d0](https://github.com/angular/angular/commit/a8210d0))
* **upgrade:** trigger `$destroy` event on upgraded component element ([#25357](https://github.com/angular/angular/issues/25357)) ([2a672a9](https://github.com/angular/angular/commit/2a672a9)), closes [#25334](https://github.com/angular/angular/issues/25334)
### Features
* **elements:** enable Shadow DOM v1 and slots ([#24861](https://github.com/angular/angular/issues/24861)) ([c9844a2](https://github.com/angular/angular/commit/c9844a2))
* **router:** warn if navigation triggered outside Angular zone ([#24959](https://github.com/angular/angular/issues/24959)) ([010e35d](https://github.com/angular/angular/commit/010e35d)), closes [#15770](https://github.com/angular/angular/issues/15770) [#15946](https://github.com/angular/angular/issues/15946) [#24728](https://github.com/angular/angular/issues/24728)
<a name="6.1.7"></a>
## [6.1.7](https://github.com/angular/angular/compare/6.1.6...6.1.7) (2018-09-06)
### Bug Fixes
* **bazel:** protractor rule should include *.e2e-spec.js ([#25701](https://github.com/angular/angular/issues/25701)) ([ed6b68b](https://github.com/angular/angular/commit/ed6b68b))
* **core:** size regression with closure compiler ([#25531](https://github.com/angular/angular/issues/25531)) ([ebcf762](https://github.com/angular/angular/commit/ebcf762))
* **docs-infra:** show "suggest edits" only for /guide and /tutorial dirs ([#24378](https://github.com/angular/angular/issues/24378)) ([66b7870](https://github.com/angular/angular/commit/66b7870))
* **upgrade:** trigger `$destroy` event on upgraded component element ([#25357](https://github.com/angular/angular/issues/25357)) ([82e0676](https://github.com/angular/angular/commit/82e0676)), closes [#25334](https://github.com/angular/angular/issues/25334)
### Features
* **docs-infra:** add "suggest edits" feature to all docs ([#24378](https://github.com/angular/angular/issues/24378)) ([82088a8](https://github.com/angular/angular/commit/82088a8))
* **docs-infra:** disable "status" selector in API list when displaying only packages ([#25718](https://github.com/angular/angular/issues/25718)) ([6f7df8a](https://github.com/angular/angular/commit/6f7df8a)), closes [#25708](https://github.com/angular/angular/issues/25708)
* **router:** warn if navigation triggered outside Angular zone ([#24959](https://github.com/angular/angular/issues/24959)) ([23a96dc](https://github.com/angular/angular/commit/23a96dc)), closes [#15770](https://github.com/angular/angular/issues/15770) [#15946](https://github.com/angular/angular/issues/15946) [#24728](https://github.com/angular/angular/issues/24728)
<a name="7.0.0-beta.4"></a>
# [7.0.0-beta.4](https://github.com/angular/angular/compare/7.0.0-beta.3...7.0.0-beta.4) (2018-08-29)
### Bug Fixes
* **bazel:** Cache fileNameToModuleName lookups ([#25731](https://github.com/angular/angular/issues/25731)) ([f394ba0](https://github.com/angular/angular/commit/f394ba0))
* **bazel:** move bazel managed runtime deps for downstream usage ([#25690](https://github.com/angular/angular/issues/25690)) ([6ed7993](https://github.com/angular/angular/commit/6ed7993))
* **bazel:** only lookup amd module-name tags in .d.ts files ([#25710](https://github.com/angular/angular/issues/25710)) ([42072c4](https://github.com/angular/angular/commit/42072c4))
* **compiler:** update compiler to generate new slot allocations ([#25607](https://github.com/angular/angular/issues/25607)) ([27e2039](https://github.com/angular/angular/commit/27e2039))
<a name="6.1.6"></a>
## [6.1.6](https://github.com/angular/angular/compare/6.1.5...6.1.6) (2018-08-29)
### Bug Fixes
* **bazel:** Cache fileNameToModuleName lookups ([#25731](https://github.com/angular/angular/issues/25731)) ([3e690e0](https://github.com/angular/angular/commit/3e690e0))
* **bazel:** only lookup amd module-name tags in .d.ts files ([#25710](https://github.com/angular/angular/issues/25710)) ([7aff364](https://github.com/angular/angular/commit/7aff364))
Note: the 6.1.5 release on npm accidentally glitched-out midway, so we cut 6.1.6 instead. sorry! :-)
<a name="7.0.0-beta.3"></a>
# [7.0.0-beta.3](https://github.com/angular/angular/compare/7.0.0-beta.2...7.0.0-beta.3) (2018-08-22)
### Features
* **router:** add UrlSegment[] to CanLoad interface ([#13127](https://github.com/angular/angular/issues/13127)) ([07d8d39](https://github.com/angular/angular/commit/07d8d39)), closes [#12411](https://github.com/angular/angular/issues/12411)
<a name="6.1.4"></a>
## [6.1.4](https://github.com/angular/angular/compare/6.1.3...6.1.4) (2018-08-22)
### Bug Fixes
* **router:** default scroll position restoration to disabled ([#25586](https://github.com/angular/angular/issues/25586)) ([7e61645](https://github.com/angular/angular/commit/7e61645)), closes [#25145](https://github.com/angular/angular/issues/25145)
<a name="7.0.0-beta.2"></a> <a name="7.0.0-beta.2"></a>
# [7.0.0-beta.2](https://github.com/angular/angular/compare/7.0.0-beta.1...7.0.0-beta.2) (2018-08-15) # [7.0.0-beta.2](https://github.com/angular/angular/compare/7.0.0-beta.1...7.0.0-beta.2) (2018-08-15)
@ -5,7 +94,15 @@
### Bug Fixes ### Bug Fixes
* **bazel:** correct type concatenated to devmode_js ([#25467](https://github.com/angular/angular/issues/25467)) ([fb2c524](https://github.com/angular/angular/commit/fb2c524)) * **bazel:** correct type concatenated to devmode_js ([#25467](https://github.com/angular/angular/issues/25467)) ([fb2c524](https://github.com/angular/angular/commit/fb2c524))
* **service-worker:** `Cache-Control: no-cache` on assets breaks service worker ([#25408](https://github.com/angular/angular/issues/25408)) ([01ec5fd](https://github.com/angular/angular/commit/01ec5fd)), closes [#25442](https://github.com/angular/angular/issues/25442)
<a name="6.1.3"></a>
## [6.1.3](https://github.com/angular/angular/compare/6.1.2...6.1.3) (2018-08-15)
### Bug Fixes
* **service-worker:** `Cache-Control: no-cache` on assets breaks service worker ([#25408](https://github.com/angular/angular/issues/25408)) ([1319ff4](https://github.com/angular/angular/commit/1319ff4)), closes [#25442](https://github.com/angular/angular/issues/25442)

View File

@@ -3,46 +3,37 @@ workspace(name = "angular")
 #
 # Download Bazel toolchain dependencies as needed by build actions
 #
 http_archive(
     name = "build_bazel_rules_nodejs",
-    urls = ["https://github.com/bazelbuild/rules_nodejs/archive/0.11.4.zip"],
-    strip_prefix = "rules_nodejs-0.11.4",
-    sha256 = "c31c4ead696944a50fad2b3ee9dfbbeffe31a8dcca0b21b9bf5b3e6c6b069801",
-)
-
-http_archive(
-    name = "bazel_skylib",
-    urls = ["https://github.com/bazelbuild/bazel-skylib/archive/0.3.1.zip"],
-    strip_prefix = "bazel-skylib-0.3.1",
-    sha256 = "95518adafc9a2b656667bbf517a952e54ce7f350779d0dd95133db4eb5c27fb1",
-)
-
-http_archive(
-    name = "io_bazel_rules_webtesting",
-    url = "https://github.com/bazelbuild/rules_webtesting/archive/0.2.1.zip",
-    strip_prefix = "rules_webtesting-0.2.1",
-    sha256 = "7d490aadff9b5262e5251fa69427ab2ffd1548422467cb9f9e1d110e2c36f0fa",
+    urls = ["https://github.com/bazelbuild/rules_nodejs/archive/0.12.0.zip"],
+    strip_prefix = "rules_nodejs-0.12.0",
+    sha256 = "2977cdbc8ae0eed7d4186385af56a50a3321a549e2136a959998bba89d2edb6e",
 )

 http_archive(
     name = "build_bazel_rules_typescript",
-    url = "https://github.com/bazelbuild/rules_typescript/archive/0.16.0.zip",
-    strip_prefix = "rules_typescript-0.16.0",
-    sha256 = "e65c5639a42e2f6d3f9d2bda62487d6b42734830dda45be1620c3e2b1115070c",
+    url = "https://github.com/bazelbuild/rules_typescript/archive/0.16.2.zip",
+    strip_prefix = "rules_typescript-0.16.2",
+    sha256 = "31601b777840fbf600dbd1893ade0d1de37166e7ba52b90735b107cfb67e38c7",
 )

+load("@build_bazel_rules_typescript//:package.bzl", "rules_typescript_dependencies")
+rules_typescript_dependencies()
+
 http_archive(
-    name = "io_bazel_rules_go",
-    url = "https://github.com/bazelbuild/rules_go/releases/download/0.10.3/rules_go-0.10.3.tar.gz",
-    sha256 = "feba3278c13cde8d67e341a837f69a029f698d7a27ddbb2a202be7a10b22142a",
+    name = "bazel_toolchains",
+    urls = [
+        "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/5124557861ebf4c0b67f98180bff1f8551e0b421.tar.gz",
+        "https://github.com/bazelbuild/bazel-toolchains/archive/5124557861ebf4c0b67f98180bff1f8551e0b421.tar.gz",
+    ],
+    strip_prefix = "bazel-toolchains-5124557861ebf4c0b67f98180bff1f8551e0b421",
+    sha256 = "c3b08805602cd1d2b67ebe96407c1e8c6ed3d4ce55236ae2efe2f1948f38168d",
 )

 http_archive(
     name = "io_bazel_rules_sass",
-    url = "https://github.com/bazelbuild/rules_sass/archive/0.1.0.zip",
-    strip_prefix = "rules_sass-0.1.0",
-    sha256 = "b243c4d64f054c174051785862ab079050d90b37a1cef7da93821c6981cb9ad4",
+    url = "https://github.com/bazelbuild/rules_sass/archive/1.11.0.zip",
+    strip_prefix = "rules_sass-1.11.0",
+    sha256 = "dbe9fb97d5a7833b2a733eebc78c9c1e3880f676ac8af16e58ccf2139cbcad03",
 )

 # This commit matches the version of buildifier in angular/ngcontainer

@@ -88,9 +79,9 @@ http_archive(
 http_archive(
     name = "org_brotli",
-    url = "https://github.com/google/brotli/archive/f9b8c02673c576a3e807edbf3a9328e9e7af6d7c.zip",
-    strip_prefix = "brotli-f9b8c02673c576a3e807edbf3a9328e9e7af6d7c",
-    sha256 = "8a517806d2b7c8505ba5c53934e7d7c70d341b68ffd268e9044d35b564a48828",
+    url = "https://github.com/google/brotli/archive/v1.0.5.zip",
+    strip_prefix = "brotli-1.0.5",
+    sha256 = "774b893a0700b0692a76e2e5b7e7610dbbe330ffbe3fe864b4b52ca718061d5a",
 )

 #

@@ -113,9 +104,13 @@ local_repository(
 # Load and install our dependencies downloaded above.
 #
-load("@build_bazel_rules_nodejs//:defs.bzl", "check_bazel_version", "node_repositories", "yarn_install")
+load("@build_bazel_rules_nodejs//:defs.bzl", "check_bazel_version", "node_repositories")

-check_bazel_version("0.15.0")
+check_bazel_version("0.16.0", """
+If you are on a Mac and using Homebrew, there is a breaking change to the installation in Bazel 0.16
+See https://blog.bazel.build/2018/08/22/bazel-homebrew.html
+""")

 node_repositories(
     package_json = ["//:package.json"],
     preserve_symlinks = True,

@@ -142,24 +137,6 @@ load("@angular//:index.bzl", "ng_setup_workspace")
 ng_setup_workspace()

-#
-# Ask Bazel to manage these toolchain dependencies for us.
-# Bazel will run `yarn install` when one of these toolchains is requested during
-# a build.
-#
-yarn_install(
-    name = "ts-api-guardian_runtime_deps",
-    package_json = "//tools/ts-api-guardian:package.json",
-    yarn_lock = "//tools/ts-api-guardian:yarn.lock",
-)
-
-yarn_install(
-    name = "http-server_runtime_deps",
-    package_json = "//tools/http-server:package.json",
-    yarn_lock = "//tools/http-server:yarn.lock",
-)
-
 ##################################
 # Skylark documentation generation

View File

@@ -22,8 +22,8 @@ Here are the most important tasks you might need to use:
 * `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
 * `yarn serve-and-sync` - run both the `docs-watch` and `start` in the same console.
 * `yarn lint` - check that the doc-viewer code follows our style rules.
-* `yarn test` - run all the unit tests once.
-* `yarn test --watch` - watch all the source files, for the doc-viewer, and run all the unit tests when any change.
+* `yarn test` - watch all the source files, for the doc-viewer, and run all the unit tests when any change.
+* `yarn test --watch=false` - run all the unit tests once.
 * `yarn e2e` - run all the e2e tests for the doc-viewer.
 * `yarn docs` - generate all the docs from the source files.
@@ -56,14 +56,9 @@ It's necessary to remove the temporary files, because otherwise they're displaye
 ## Using ServiceWorker locally
-Since abb36e3cb, running `yarn start --prod` will no longer set up the ServiceWorker, which
-would require manually running `yarn sw-manifest` and `yarn sw-copy` (something that is not possible
-with webpack serving the files from memory).
-If you want to test ServiceWorker locally, you can use `yarn build` and serve the files in `dist/`
-with `yarn http-server dist -p 4200`.
-For more details see #16745.
+Running `yarn start` (even when explicitly targeting production mode) does not set up the
+ServiceWorker. If you want to test the ServiceWorker locally, you can use `yarn build` and then
+serve the files in `dist/` with `yarn http-server dist -p 4200`.
 ## Guide to authoring

View File

@@ -8,17 +8,24 @@ LABEL name="angular.io PR preview" \
 VOLUME /aio-secrets
 VOLUME /var/www/aio-builds
+VOLUME /dockerbuild
 EXPOSE 80 443
 # Build-time args and env vars
+# The AIO_ARTIFACT_PATH path needs to be kept in synch with the value of
+# `aio_preview->steps->store_artifacts->destination` property in `.circleci/config.yml`
+ARG AIO_ARTIFACT_PATH=aio/dist/aio-snapshot.tgz
+ARG TEST_AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH
 ARG AIO_BUILDS_DIR=/var/www/aio-builds
 ARG TEST_AIO_BUILDS_DIR=/tmp/aio-builds
 ARG AIO_DOMAIN_NAME=ngbuilds.io
 ARG TEST_AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME.localhost
 ARG AIO_GITHUB_ORGANIZATION=angular
-ARG TEST_AIO_GITHUB_ORGANIZATION=angular
+ARG TEST_AIO_GITHUB_ORGANIZATION=test-org
+ARG AIO_GITHUB_REPO=angular
+ARG TEST_AIO_GITHUB_REPO=test-repo
 ARG AIO_GITHUB_TEAM_SLUGS=team,aio-contributors
 ARG TEST_AIO_GITHUB_TEAM_SLUGS=team,aio-contributors
 ARG AIO_NGINX_HOSTNAME=$AIO_DOMAIN_NAME
@@ -27,34 +34,36 @@ ARG AIO_NGINX_PORT_HTTP=80
 ARG TEST_AIO_NGINX_PORT_HTTP=8080
 ARG AIO_NGINX_PORT_HTTPS=443
 ARG TEST_AIO_NGINX_PORT_HTTPS=4433
-ARG AIO_REPO_SLUG=angular/angular
-ARG TEST_AIO_REPO_SLUG=test-repo/test-slug
+ARG AIO_SIGNIFICANT_FILES_PATTERN='^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)'
+ARG TEST_AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN
 ARG AIO_TRUSTED_PR_LABEL="aio: preview"
 ARG TEST_AIO_TRUSTED_PR_LABEL="aio: preview"
-ARG AIO_UPLOAD_HOSTNAME=upload.localhost
-ARG TEST_AIO_UPLOAD_HOSTNAME=upload.localhost
-ARG AIO_UPLOAD_MAX_SIZE=20971520
-ARG TEST_AIO_UPLOAD_MAX_SIZE=20971520
-ARG AIO_UPLOAD_PORT=3000
-ARG TEST_AIO_UPLOAD_PORT=3001
+ARG AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
+ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
+ARG AIO_ARTIFACT_MAX_SIZE=20971520
+ARG TEST_AIO_ARTIFACT_MAX_SIZE=200
+ARG AIO_PREVIEW_SERVER_PORT=3000
+ARG TEST_AIO_PREVIEW_SERVER_PORT=3001
-ENV AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
-    AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
-    AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
-    AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
-    AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
-    AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
-    AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
-    AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
-    AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
-    AIO_REPO_SLUG=$AIO_REPO_SLUG TEST_AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG \
-    AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
-    AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
-    AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
-    AIO_UPLOAD_HOSTNAME=$AIO_UPLOAD_HOSTNAME TEST_AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME \
-    AIO_UPLOAD_MAX_SIZE=$AIO_UPLOAD_MAX_SIZE TEST_AIO_UPLOAD_MAX_SIZE=$TEST_AIO_UPLOAD_MAX_SIZE \
-    AIO_UPLOAD_PORT=$AIO_UPLOAD_PORT TEST_AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT \
-    AIO_WWW_USER=www-data \
-    NODE_ENV=production
+ENV AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH TEST_AIO_ARTIFACT_PATH=$TEST_AIO_ARTIFACT_PATH \
+    AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
+    AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
+    AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
+    AIO_GITHUB_REPO=$AIO_GITHUB_REPO TEST_AIO_GITHUB_REPO=$TEST_AIO_GITHUB_REPO \
+    AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
+    AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
+    AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
+    AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
+    AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
+    AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
+    AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
+    AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
+    AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN TEST_AIO_SIGNIFICANT_FILES_PATTERN=$TEST_AIO_SIGNIFICANT_FILES_PATTERN \
+    AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
+    AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME TEST_AIO_PREVIEW_SERVER_HOSTNAME=$TEST_AIO_PREVIEW_SERVER_HOSTNAME \
+    AIO_ARTIFACT_MAX_SIZE=$AIO_ARTIFACT_MAX_SIZE TEST_AIO_ARTIFACT_MAX_SIZE=$TEST_AIO_ARTIFACT_MAX_SIZE \
+    AIO_PREVIEW_SERVER_PORT=$AIO_PREVIEW_SERVER_PORT TEST_AIO_PREVIEW_SERVER_PORT=$TEST_AIO_PREVIEW_SERVER_PORT \
+    AIO_WWW_USER=www-data \
+    NODE_ENV=production
@@ -64,7 +73,7 @@ RUN mkdir /var/log/aio
 # Add extra package sources
 RUN apt-get update -y && apt-get install -y curl
-RUN curl --silent --show-error --location https://deb.nodesource.com/setup_6.x | bash -
+RUN curl --silent --show-error --location https://deb.nodesource.com/setup_10.x | bash -
 RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
 RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
 RUN echo "deb http://ftp.debian.org/debian jessie-backports main" | tee /etc/apt/sources.list.d/backports.list
@@ -99,9 +108,9 @@ RUN printenv | grep AIO_ >> /etc/environment
 # Set up dnsmasq
 COPY dnsmasq/dnsmasq.conf /etc/
 RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf
+RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
 RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
-RUN sed -i "s|{{\$TEST_AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf
+RUN sed -i "s|{{\$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
 # Set up SSL/TLS certificates
@@ -125,9 +134,9 @@ RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/
 RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
 RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-prod.conf
 RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf
 COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-test.conf
 RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
@@ -136,9 +145,9 @@ RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/co
 RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
 RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-test.conf
 RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$TEST_AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$TEST_AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf
 # Set up pm2

View File

@@ -8,9 +8,9 @@ listen-address=127.0.0.1
 # Force an IP address for these domains.
 address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
-address=/{{$AIO_UPLOAD_HOSTNAME}}/127.0.0.1
+address=/{{$AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
 address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
-address=/{{$TEST_AIO_UPLOAD_HOSTNAME}}/127.0.0.1
+address=/{{$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
 # Run as root (required from inside docker container).
 user=root

View File

@@ -1,4 +1,4 @@
-/var/log/aio/upload-server-*.log {
+/var/log/aio/preview-server-*.log {
   compress
   copytruncate
   delaycompress

View File

@@ -66,24 +66,17 @@ server {
     return 200 '';
   }
-  # Upload builds
-  location "~^/create-build/(?<pr>[1-9][0-9]*)/(?<sha>[0-9a-f]{40})/?$" {
+  # Notify about CircleCI builds
+  location "~^/circle-build/?$" {
     if ($request_method != "POST") {
       add_header Allow "POST";
       return 405;
     }
-    client_body_temp_path /tmp/aio-create-builds;
-    client_body_buffer_size 128K;
-    client_max_body_size {{$AIO_UPLOAD_MAX_SIZE}};
-    client_body_in_file_only on;
     proxy_pass_request_headers on;
-    proxy_set_header X-FILE $request_body_file;
-    proxy_set_body off;
     proxy_redirect off;
-    proxy_method GET;
-    proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;
+    proxy_method POST;
+    proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
     resolver 127.0.0.1;
   }
@@ -98,7 +91,7 @@ server {
     proxy_pass_request_headers on;
     proxy_redirect off;
     proxy_method POST;
-    proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;
+    proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
     resolver 127.0.0.1;
   }

View File

@@ -3,29 +3,53 @@ import * as fs from 'fs';
 import * as path from 'path';
 import * as shell from 'shelljs';
 import {HIDDEN_DIR_PREFIX} from '../common/constants';
+import {GithubApi} from '../common/github-api';
 import {GithubPullRequests} from '../common/github-pull-requests';
-import {assertNotMissingOrEmpty} from '../common/utils';
+import {assertNotMissingOrEmpty, createLogger, getPrInfoFromDownloadPath} from '../common/utils';
 // Classes
 export class BuildCleaner {
+  private logger = createLogger('BuildCleaner');
   // Constructor
-  constructor(protected buildsDir: string, protected repoSlug: string, protected githubToken: string) {
+  constructor(protected buildsDir: string, protected githubOrg: string, protected githubRepo: string,
+              protected githubToken: string, protected downloadsDir: string, protected artifactPath: string) {
     assertNotMissingOrEmpty('buildsDir', buildsDir);
-    assertNotMissingOrEmpty('repoSlug', repoSlug);
+    assertNotMissingOrEmpty('githubOrg', githubOrg);
+    assertNotMissingOrEmpty('githubRepo', githubRepo);
     assertNotMissingOrEmpty('githubToken', githubToken);
+    assertNotMissingOrEmpty('downloadsDir', downloadsDir);
+    assertNotMissingOrEmpty('artifactPath', artifactPath);
   }
   // Methods - Public
-  public cleanUp(): Promise<void> {
-    return Promise.all([
-      this.getExistingBuildNumbers(),
-      this.getOpenPrNumbers(),
-    ]).then(([existingBuilds, openPrs]) => this.removeUnnecessaryBuilds(existingBuilds, openPrs));
+  public async cleanUp(): Promise<void> {
+    try {
+      this.logger.log('Cleaning up builds and downloads');
+      const openPrs = await this.getOpenPrNumbers();
+      this.logger.log(`Open pull requests: ${openPrs.length}`);
+      await Promise.all([
+        this.cleanBuilds(openPrs),
+        this.cleanDownloads(openPrs),
+      ]);
+    } catch (error) {
+      this.logger.error('ERROR:', error);
+    }
   }
-  // Methods - Protected
-  protected getExistingBuildNumbers(): Promise<number[]> {
-    return new Promise((resolve, reject) => {
+  public async cleanBuilds(openPrs: number[]): Promise<void> {
+    const existingBuilds = await this.getExistingBuildNumbers();
+    await this.removeUnnecessaryBuilds(existingBuilds, openPrs);
+  }
+  public async cleanDownloads(openPrs: number[]): Promise<void> {
+    const existingDownloads = await this.getExistingDownloads();
+    await this.removeUnnecessaryDownloads(existingDownloads, openPrs);
+  }
+  public getExistingBuildNumbers(): Promise<number[]> {
+    return new Promise<number[]>((resolve, reject) => {
       fs.readdir(this.buildsDir, (err, files) => {
         if (err) {
           return reject(err);
@@ -41,31 +65,29 @@ export class BuildCleaner {
     });
   }
-  protected getOpenPrNumbers(): Promise<number[]> {
-    const githubPullRequests = new GithubPullRequests(this.githubToken, this.repoSlug);
-    return githubPullRequests.
-      fetchAll('open').
-      then(prs => prs.map(pr => pr.number));
+  public async getOpenPrNumbers(): Promise<number[]> {
+    const api = new GithubApi(this.githubToken);
+    const githubPullRequests = new GithubPullRequests(api, this.githubOrg, this.githubRepo);
+    const prs = await githubPullRequests.fetchAll('open');
+    return prs.map(pr => pr.number);
   }
-  protected removeDir(dir: string) {
+  public removeDir(dir: string): void {
     try {
       if (shell.test('-d', dir)) {
         shell.chmod('-R', 'a+w', dir);
         shell.rm('-rf', dir);
       }
     } catch (err) {
-      console.error(`ERROR: Unable to remove '${dir}' due to:`, err);
+      this.logger.error(`ERROR: Unable to remove '${dir}' due to:`, err);
     }
   }
-  protected removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]) {
+  public removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]): void {
    const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));
-    console.log(`Existing builds: ${existingBuildNumbers.length}`);
-    console.log(`Open pull requests: ${openPrNumbers.length}`);
-    console.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
+    this.logger.log(`Existing builds: ${existingBuildNumbers.length}`);
+    this.logger.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
     // Try removing public dirs.
     toRemove.
@@ -77,4 +99,29 @@ export class BuildCleaner {
       map(num => path.join(this.buildsDir, HIDDEN_DIR_PREFIX + String(num))).
       forEach(dir => this.removeDir(dir));
   }
+  public getExistingDownloads(): Promise<string[]> {
+    const artifactFile = path.basename(this.artifactPath);
+    return new Promise<string[]>((resolve, reject) => {
+      fs.readdir(this.downloadsDir, (err, files) => {
+        if (err) {
+          return reject(err);
+        }
+        files = files.filter(file => file.endsWith(artifactFile));
+        resolve(files);
+      });
+    });
+  }
+  public removeUnnecessaryDownloads(existingDownloads: string[], openPrNumbers: number[]): void {
+    const toRemove = existingDownloads.filter(filePath => {
+      const {pr} = getPrInfoFromDownloadPath(filePath);
+      return !openPrNumbers.includes(pr);
+    });
+    this.logger.log(`Existing downloads: ${existingDownloads.length}`);
+    this.logger.log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);
+    toRemove.forEach(filePath => shell.rm(filePath));
+  }
 }

View File

@@ -1,23 +1,26 @@
 // Imports
-import {getEnvVar} from '../common/utils';
+import {AIO_DOWNLOADS_DIR} from '../common/constants';
+import {
+  AIO_ARTIFACT_PATH,
+  AIO_BUILDS_DIR,
+  AIO_GITHUB_ORGANIZATION,
+  AIO_GITHUB_REPO,
+  AIO_GITHUB_TOKEN,
+} from '../common/env-variables';
 import {BuildCleaner} from './build-cleaner';
-// Constants
-const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
-const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN', true);
-const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
 // Run
 _main();
 // Functions
-function _main() {
-  console.log(`[${new Date()}] - Cleaning up builds...`);
-  const buildCleaner = new BuildCleaner(AIO_BUILDS_DIR, AIO_REPO_SLUG, AIO_GITHUB_TOKEN);
-  buildCleaner.cleanUp().catch(err => {
-    console.error('ERROR:', err);
-    process.exit(1);
-  });
+function _main(): void {
+  const buildCleaner = new BuildCleaner(
+    AIO_BUILDS_DIR,
+    AIO_GITHUB_ORGANIZATION,
+    AIO_GITHUB_REPO,
+    AIO_GITHUB_TOKEN,
+    AIO_DOWNLOADS_DIR,
+    AIO_ARTIFACT_PATH);
+  buildCleaner.cleanUp().catch(() => process.exit(1));
 }

View File

@@ -0,0 +1,90 @@
// Imports
import fetch from 'node-fetch';
import {assertNotMissingOrEmpty} from './utils';
// Constants
const CIRCLE_CI_API_URL = 'https://circleci.com/api/v1.1/project/github';
// Interfaces - Types
export interface ArtifactInfo {
path: string;
pretty_path: string;
node_index: number;
url: string;
}
export type ArtifactResponse = ArtifactInfo[];
export interface BuildInfo {
reponame: string;
failed: boolean;
branch: string;
username: string;
build_num: number;
has_artifacts: boolean;
outcome: string; // e.g. 'success'
vcs_revision: string; // HEAD SHA
// there are other fields but they are not used in this code
}
/**
* A Helper that can interact with the CircleCI API.
*/
export class CircleCiApi {
private tokenParam = `circle-token=${this.circleCiToken}`;
/**
* Construct a helper that can interact with the CircleCI REST API.
* @param githubOrg The Github organisation whose repos we want to access in CircleCI (e.g. angular).
* @param githubRepo The Github repo whose builds we want to access in CircleCI (e.g. angular).
* @param circleCiToken The CircleCI API access token (secret).
*/
constructor(
private githubOrg: string,
private githubRepo: string,
private circleCiToken: string,
) {
assertNotMissingOrEmpty('githubOrg', githubOrg);
assertNotMissingOrEmpty('githubRepo', githubRepo);
assertNotMissingOrEmpty('circleCiToken', circleCiToken);
}
/**
* Get the info for a build from the CircleCI API
* @param buildNumber The CircleCI build number that generated the artifact.
* @returns A promise to the info about the build
*/
public async getBuildInfo(buildNumber: number): Promise<BuildInfo> {
try {
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
const response = await fetch(`${baseUrl}?${this.tokenParam}`);
if (response.status !== 200) {
throw new Error(`${baseUrl}: ${response.status} - ${response.statusText}`);
}
return response.json<BuildInfo>();
} catch (error) {
throw new Error(`CircleCI build info request failed (${error.message})`);
}
}
/**
* Query the CircleCI API to get a URL for a specified artifact from a specified build.
* @param artifactPath The path, within the build, to the artifact.
* @returns A promise to the URL that can be requested to download the actual build artifact file.
*/
public async getBuildArtifactUrl(buildNumber: number, artifactPath: string): Promise<string> {
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
try {
const response = await fetch(`${baseUrl}/artifacts?${this.tokenParam}`);
const artifacts = await response.json<ArtifactResponse>();
const artifact = artifacts.find(item => item.path === artifactPath);
if (!artifact) {
throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNumber}`);
}
return artifact.url;
} catch (error) {
throw new Error(`CircleCI artifact URL request failed (${error.message})`);
}
}
}
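For orientation, here is a minimal usage sketch of the `CircleCiApi` helper above. It is illustrative only and not part of the change set; the org/repo values, the build number and the artifact path are placeholders, and the import path is assumed.

import {CircleCiApi} from './circle-ci-api';

async function printArtifactUrl(circleCiToken: string) {
  // Assumed values: 'angular/angular' as the org/repo, a placeholder build number,
  // and the artifact path configured via AIO_ARTIFACT_PATH in the Dockerfile above.
  const api = new CircleCiApi('angular', 'angular', circleCiToken);
  const info = await api.getBuildInfo(12345);
  console.log(`Build ${info.build_num} on ${info.branch}: outcome=${info.outcome}`);
  const url = await api.getBuildArtifactUrl(12345, 'aio/dist/aio-snapshot.tgz');
  console.log(`Artifact URL: ${url}`);
}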

View File

@@ -1,3 +1,4 @@
 // Constants
+export const AIO_DOWNLOADS_DIR = '/tmp/aio-downloads';
 export const HIDDEN_DIR_PREFIX = 'hidden--';
 export const SHORT_SHA_LEN = 7;

View File

@@ -0,0 +1,19 @@
import {getEnvVar} from './utils';
export const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
export const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
export const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
export const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
export const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
export const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
export const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
export const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
export const AIO_NGINX_HOSTNAME = getEnvVar('AIO_NGINX_HOSTNAME');
export const AIO_NGINX_PORT_HTTP = +getEnvVar('AIO_NGINX_PORT_HTTP');
export const AIO_NGINX_PORT_HTTPS = +getEnvVar('AIO_NGINX_PORT_HTTPS');
export const AIO_SIGNIFICANT_FILES_PATTERN = getEnvVar('AIO_SIGNIFICANT_FILES_PATTERN');
export const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
export const AIO_PREVIEW_SERVER_HOSTNAME = getEnvVar('AIO_PREVIEW_SERVER_HOSTNAME');
export const AIO_PREVIEW_SERVER_PORT = +getEnvVar('AIO_PREVIEW_SERVER_PORT');
export const AIO_ARTIFACT_MAX_SIZE = +getEnvVar('AIO_ARTIFACT_MAX_SIZE');
export const AIO_WWW_USER = getEnvVar('AIO_WWW_USER');

View File

@@ -28,29 +28,17 @@ export class GithubApi {
   }
   // Methods - Public
-  public get<T>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
+  public get<T = any>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
     const path = this.buildPath(pathname, params);
     return this.request<T>('get', path);
   }
-  public post<T>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
+  public post<T = any>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
     const path = this.buildPath(pathname, params);
     return this.request<T>('post', path, data);
   }
-  // Methods - Protected
-  protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
-    if (params == null) {
-      return pathname;
-    }
-    const search = (params === null) ? '' : this.serializeSearchParams(params);
-    const joiner = search && '?';
-    return `${pathname}${joiner}${search}`;
-  }
-  protected getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 0): Promise<T[]> {
+  public getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 0): Promise<T[]> {
     const perPage = 100;
     const params = {
       ...baseParams,
@@ -67,6 +55,18 @@
     });
   }
+  // Methods - Protected
+  protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
+    if (params == null) {
+      return pathname;
+    }
+    const search = (params === null) ? '' : this.serializeSearchParams(params);
+    const joiner = search && '?';
+    return `${pathname}${joiner}${search}`;
+  }
   protected request<T>(method: string, path: string, data: any = null): Promise<T> {
     return new Promise<T>((resolve, reject) => {
       const options = {
@@ -81,7 +81,7 @@
         reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
       };
       const onSuccess = (responseText: string) => {
-        try { resolve(JSON.parse(responseText)); } catch (err) { reject(err); }
+        try { resolve(responseText && JSON.parse(responseText)); } catch (err) { reject(err); }
       };
       const onResponse = (res: IncomingMessage) => {
         const statusCode = res.statusCode || -1;

View File

@@ -1,46 +1,79 @@
-// Imports
-import {assertNotMissingOrEmpty} from '../common/utils';
 import {GithubApi} from './github-api';
+import {assert, assertNotMissingOrEmpty} from './utils';
-// Interfaces - Types
 export interface PullRequest {
   number: number;
   user: {login: string};
   labels: {name: string}[];
 }
+export interface FileInfo {
+  sha: string;
+  filename: string;
+}
 export type PullRequestState = 'all' | 'closed' | 'open';
-// Classes
-export class GithubPullRequests extends GithubApi {
-  // Constructor
-  constructor(githubToken: string, protected repoSlug: string) {
-    super(githubToken);
-    assertNotMissingOrEmpty('repoSlug', repoSlug);
+/**
+ * Access pull requests on GitHub.
+ */
+export class GithubPullRequests {
+  public repoSlug: string;
+  /**
+   * Create an instance of this helper
+   * @param api An instance of the Github API helper.
+   * @param githubOrg The organisation on GitHub whose repo we will interrogate.
+   * @param githubRepo The repository on Github with whose PRs we will interact.
+   */
+  constructor(private api: GithubApi, githubOrg: string, githubRepo: string) {
+    assertNotMissingOrEmpty('githubOrg', githubOrg);
+    assertNotMissingOrEmpty('githubRepo', githubRepo);
+    this.repoSlug = `${githubOrg}/${githubRepo}`;
   }
-  // Methods - Public
-  public addComment(pr: number, body: string): Promise<void> {
-    if (!(pr > 0)) {
-      throw new Error(`Invalid PR number: ${pr}`);
-    } else if (!body) {
-      throw new Error(`Invalid or empty comment body: ${body}`);
-    }
-    return this.post<void>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
+  /**
+   * Post a comment on a PR.
+   * @param pr The number of the PR on which to comment.
+   * @param body The body of the comment to post.
+   * @returns A promise that resolves when the comment has been posted.
+   */
+  public addComment(pr: number, body: string): Promise<any> {
+    assert(pr > 0, `Invalid PR number: ${pr}`);
+    assert(!!body, `Invalid or empty comment body: ${body}`);
+    return this.api.post<any>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
   }
+  /**
+   * Request information about a PR.
+   * @param pr The number of the PR for which to request info.
+   * @returns A promise that resolves with information about the specified PR.
+   */
   public fetch(pr: number): Promise<PullRequest> {
+    assert(pr > 0, `Invalid PR number: ${pr}`);
     // Using the `/issues/` URL, because the `/pulls/` one does not provide labels.
-    return this.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
+    return this.api.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
   }
+  /**
+   * Request information about all PRs that match the given state.
+   * @param state Only retrieve PRs that have this state.
+   * @returns A promise that is resolved with information about the requested PRs.
+   */
   public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
-    console.log(`Fetching ${state} pull requests...`);
     const pathname = `/repos/${this.repoSlug}/pulls`;
     const params = {state};
-    return this.getPaginated<PullRequest>(pathname, params);
+    return this.api.getPaginated<PullRequest>(pathname, params);
   }
+  /**
+   * Request a list of files for the given PR.
+   * @param pr The number of the PR for which to request files.
+   * @returns A promise that resolves to an array of file information.
+   */
+  public fetchFiles(pr: number): Promise<FileInfo[]> {
+    assert(pr > 0, `Invalid PR number: ${pr}`);
+    return this.api.get<FileInfo[]>(`/repos/${this.repoSlug}/pulls/${pr}/files`);
+  }
 }
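A minimal sketch of how this helper composes with GithubApi, mirroring the wiring shown later in preview-server-factory.ts. It is illustrative only; the import paths, the token argument and the 'angular'/'angular' org/repo values are assumptions.

import {GithubApi} from './github-api';
import {GithubPullRequests} from './github-pull-requests';

async function listOpenPrNumbers(githubToken: string): Promise<number[]> {
  // One GithubApi instance can be shared by several helpers (PRs, teams, ...).
  const api = new GithubApi(githubToken);
  const prs = new GithubPullRequests(api, 'angular', 'angular');
  const open = await prs.fetchAll('open');
  return open.map(pr => pr.number);
}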

View File

@@ -1,45 +1,72 @@
-// Imports
-import {assertNotMissingOrEmpty} from '../common/utils';
 import {GithubApi} from './github-api';
+import {assertNotMissingOrEmpty} from './utils';
-// Interfaces - Types
-interface Team {
+export interface Team {
   id: number;
   slug: string;
 }
-interface TeamMembership {
+export interface TeamMembership {
   state: string;
 }
-// Classes
-export class GithubTeams extends GithubApi {
-  // Constructor
-  constructor(githubToken: string, protected organization: string) {
-    super(githubToken);
-    assertNotMissingOrEmpty('organization', organization);
+export class GithubTeams {
+  /**
+   * Create an instance of this helper
+   * @param api An instance of the Github API helper.
+   * @param githubOrg The organisation on GitHub whose repo we will interrogate.
+   */
+  constructor(private api: GithubApi, protected githubOrg: string) {
+    assertNotMissingOrEmpty('githubOrg', githubOrg);
   }
-  // Methods - Public
+  /**
+   * Request information about all the organisation's teams in GitHub.
+   * @returns A promise that is resolved with information about the teams.
+   */
   public fetchAll(): Promise<Team[]> {
-    return this.getPaginated<Team>(`/orgs/${this.organization}/teams`);
+    return this.api.getPaginated<Team>(`/orgs/${this.githubOrg}/teams`);
   }
-  public isMemberById(username: string, teamIds: number[]): Promise<boolean> {
-    const getMembership = (teamId: number) =>
-      this.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`).
-        then(membership => membership.state === 'active').
-        catch(() => false);
-    const reduceFn = (promise: Promise<boolean>, teamId: number) =>
-      promise.then(isMember => isMember || getMembership(teamId));
-    return teamIds.reduce(reduceFn, Promise.resolve(false));
+  /**
+   * Check whether the specified username is a member of the specified team.
+   * @param username The username to check for in the team.
+   * @param teamIds The team to check for the username.
+   * @returns a Promise that resolves to `true` if the username is a member of the team.
+   */
+  public async isMemberById(username: string, teamIds: number[]): Promise<boolean> {
+    const getMembership = async (teamId: number) => {
+      try {
+        const {state} = await this.api.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`);
+        return state === 'active';
+      } catch (error) {
+        return false;
+      }
+    };
+    for (const teamId of teamIds) {
+      if (await getMembership(teamId)) {
+        return true;
+      }
+    }
+    return false;
   }
-  public isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
-    return this.fetchAll().
-      then(teams => teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id)).
-      then(teamIds => this.isMemberById(username, teamIds)).
-      catch(() => false);
+  /**
+   * Check whether the given username is a member of the teams specified by the team slugs.
+   * @param username The username to check for in the teams.
+   * @param teamSlugs A collection of slugs that represent the teams to check for the username.
+   * @returns a Promise that resolves to `true` if the username is a member of at least one of the specified teams.
+   */
+  public async isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
+    try {
+      const teams = await this.fetchAll();
+      const teamIds = teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id);
+      return await this.isMemberById(username, teamIds);
+    } catch (error) {
+      return false;
+    }
   }
 }

View File

@@ -1,17 +1,85 @@
-// Functions
-export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
+import {basename, resolve as resolvePath} from 'path';
+import {SHORT_SHA_LEN} from './constants';
+/**
+ * Shorten a SHA to make it more readable
+ * @param sha The SHA to shorten.
+ */
+export function computeShortSha(sha: string) {
+  return sha.substr(0, SHORT_SHA_LEN);
+}
+/**
+ * Compute the path for a downloaded artifact file.
+ * @param downloadsDir The directory where artifacts are downloaded
+ * @param pr The PR associated with this artifact.
+ * @param sha The SHA associated with the build for this artifact.
+ * @param artifactPath The path to the artifact on CircleCI.
+ * @returns The fully resolved location for the specified downloaded artifact.
+ */
+export function computeArtifactDownloadPath(downloadsDir: string, pr: number, sha: string, artifactPath: string) {
+  return resolvePath(downloadsDir, `${pr}-${computeShortSha(sha)}-${basename(artifactPath)}`);
+}
+/**
+ * Extract the PR number and latest commit SHA from a downloaded file path.
+ * @param downloadPath the path to the downloaded file.
+ * @returns An object whose keys are the PR and SHA extracted from the file path.
+ */
+export function getPrInfoFromDownloadPath(downloadPath: string) {
+  const file = basename(downloadPath);
+  const [pr, sha] = file.split('-');
+  return {pr: +pr, sha};
+}
+/**
+ * Assert that a value is true.
+ * @param value The value to assert.
+ * @param message The message if the value is not true.
+ */
+export function assert(value: boolean, message: string) {
   if (!value) {
-    throw new Error(`Missing or empty required parameter '${name}'!`);
+    throw new Error(message);
   }
+}
+/**
+ * Assert that a parameter is not equal to "".
+ * @param name The name of the parameter.
+ * @param value The value of the parameter.
+ */
+export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
+  assert(!!value, `Missing or empty required parameter '${name}'!`);
 };
+/**
+ * Get an environment variable.
+ * @param name The name of the environment variable.
+ * @param isOptional True if the variable is optional.
+ * @returns The value of the variable or "" if it is optional and falsy.
+ * @throws `Error` if the variable is falsy and not optional.
+ */
 export const getEnvVar = (name: string, isOptional = false): string => {
   const value = process.env[name];
   if (!isOptional && !value) {
-    console.error(`ERROR: Missing required environment variable '${name}'!`);
-    process.exit(1);
+    try {
+      throw new Error(`ERROR: Missing required environment variable '${name}'!`);
+    } catch (error) {
+      console.error(error.stack);
+      process.exit(1);
+    }
   }
   return value || '';
 };
+export function createLogger(scope: string) {
+  const padding = ' '.repeat(20 - scope.length);
+  return {
+    error: (...args: any[]) => console.error(`[${new Date()}]`, `${scope}:${padding}`, ...args),
+    info: (...args: any[]) => console.info(`[${new Date()}]`, `${scope}:${padding}`, ...args),
+    log: (...args: any[]) => console.log(`[${new Date()}]`, `${scope}:${padding}`, ...args),
+    warn: (...args: any[]) => console.warn(`[${new Date()}]`, `${scope}:${padding}`, ...args),
+  };
+}
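A small illustrative sketch of how the new path helpers round-trip, so the cleaner can recover the PR number from a file name produced by the retriever. Not part of the change set; the PR number, SHA and paths below are placeholders.

import {computeArtifactDownloadPath, computeShortSha, getPrInfoFromDownloadPath} from './utils';

const fullSha = 'a'.repeat(40);                          // placeholder 40-character SHA
const downloadPath = computeArtifactDownloadPath('/tmp/aio-downloads', 12345, fullSha, 'aio/dist/aio-snapshot.tgz');
// downloadPath ends in '12345-aaaaaaa-aio-snapshot.tgz'
const {pr, sha} = getPrInfoFromDownloadPath(downloadPath);
console.log(pr === 12345 && sha === computeShortSha(fullSha));  // true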

View File

@@ -4,13 +4,16 @@ import {EventEmitter} from 'events';
 import * as fs from 'fs';
 import * as path from 'path';
 import * as shell from 'shelljs';
-import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
-import {assertNotMissingOrEmpty} from '../common/utils';
+import {HIDDEN_DIR_PREFIX} from '../common/constants';
+import {assertNotMissingOrEmpty, computeShortSha, createLogger} from '../common/utils';
 import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
-import {UploadError} from './upload-error';
+import {PreviewServerError} from './preview-error';
 // Classes
 export class BuildCreator extends EventEmitter {
+  private logger = createLogger('BuildCreator');
   // Constructor
   constructor(protected buildsDir: string) {
     super();
@@ -18,9 +21,9 @@ export class BuildCreator extends EventEmitter {
   }
   // Methods - Public
-  public create(pr: string, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
+  public create(pr: number, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
     // Use only part of the SHA for more readable URLs.
-    sha = sha.substr(0, SHORT_SHA_LEN);
+    sha = computeShortSha(sha);
     const {newPrDir: prDir} = this.getCandidatePrDirs(pr, isPublic);
     const shaDir = path.join(prDir, sha);
@@ -33,7 +36,7 @@
       then(([prDirExisted, shaDirExisted]) => {
         if (shaDirExisted) {
           const publicOrNot = isPublic ? 'public' : 'non-public';
-          throw new UploadError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
+          throw new PreviewServerError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
         }
         dirToRemoveOnError = prDirExisted ? shaDir : prDir;
@@ -49,15 +52,15 @@
           shell.rm('-rf', dirToRemoveOnError);
         }
-        if (!(err instanceof UploadError)) {
-          err = new UploadError(500, `Error while uploading to directory: ${shaDir}\n${err}`);
+        if (!(err instanceof PreviewServerError)) {
+          err = new PreviewServerError(500, `Error while creating preview at: ${shaDir}\n${err}`);
         }
         throw err;
       });
   }
-  public updatePrVisibility(pr: string, makePublic: boolean): Promise<boolean> {
+  public updatePrVisibility(pr: number, makePublic: boolean): Promise<boolean> {
     const {oldPrDir: otherVisPrDir, newPrDir: targetVisPrDir} = this.getCandidatePrDirs(pr, makePublic);
     return Promise.
@@ -68,7 +71,8 @@
           return false;
         } else if (targetVisPrDirExisted) {
           // Error: Directories for both visibilities exist.
-          throw new UploadError(409, `Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
+          throw new PreviewServerError(409,
+            `Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
         }
         // Visibility change: Moving `otherVisPrDir` to `targetVisPrDir`.
@@ -79,8 +83,8 @@
         then(() => true);
       }).
       catch(err => {
-        if (!(err instanceof UploadError)) {
-          err = new UploadError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
+        if (!(err instanceof PreviewServerError)) {
+          err = new PreviewServerError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
         }
         throw err;
@@ -102,7 +106,7 @@
       }
       if (stderr) {
-        console.warn(stderr);
+        this.logger.warn(stderr);
       }
       try {
@@ -116,9 +120,9 @@
     });
   }
-  protected getCandidatePrDirs(pr: string, isPublic: boolean) {
+  protected getCandidatePrDirs(pr: number, isPublic: boolean): {oldPrDir: string, newPrDir: string} {
     const hiddenPrDir = path.join(this.buildsDir, HIDDEN_DIR_PREFIX + pr);
-    const publicPrDir = path.join(this.buildsDir, pr);
+    const publicPrDir = path.join(this.buildsDir, `${pr}`);
     const oldPrDir = isPublic ? hiddenPrDir : publicPrDir;
     const newPrDir = isPublic ? publicPrDir : hiddenPrDir;

View File

@@ -0,0 +1,83 @@
import * as fs from 'fs';
import fetch from 'node-fetch';
import {dirname} from 'path';
import {mkdir} from 'shelljs';
import {promisify} from 'util';
import {CircleCiApi} from '../common/circle-ci-api';
import {assert, assertNotMissingOrEmpty, computeArtifactDownloadPath, createLogger} from '../common/utils';
import {PreviewServerError} from './preview-error';
export interface GithubInfo {
org: string;
pr: number;
repo: string;
sha: string;
success: boolean;
}
/**
* A helper that can get information about builds and download build artifacts.
*/
export class BuildRetriever {
private logger = createLogger('BuildRetriever');
constructor(private api: CircleCiApi, private downloadSizeLimit: number, private downloadDir: string) {
assert(downloadSizeLimit > 0, 'Invalid parameter "downloadSizeLimit" should be a number greater than 0.');
assertNotMissingOrEmpty('downloadDir', downloadDir);
}
/**
* Get GitHub information about a build
* @param buildNum The number of the build for which to retrieve the info.
* @returns The Github org, repo, PR and latest SHA for the specified build.
*/
public async getGithubInfo(buildNum: number): Promise<GithubInfo> {
const buildInfo = await this.api.getBuildInfo(buildNum);
const githubInfo: GithubInfo = {
org: buildInfo.username,
pr: getPrfromBranch(buildInfo.branch),
repo: buildInfo.reponame,
sha: buildInfo.vcs_revision,
success: !buildInfo.failed,
};
return githubInfo;
}
/**
* Make a request to the given URL for a build artifact and store it locally.
* @param buildNum the number of the CircleCI build whose artifact we want to download.
* @param pr the number of the PR that triggered the CircleCI build.
* @param sha the commit in the PR that triggered the CircleCI build.
* @param artifactPath the path on CircleCI where the artifact was stored.
* @returns A promise to the file path where the downloaded file was stored.
*/
public async downloadBuildArtifact(buildNum: number, pr: number, sha: string, artifactPath: string): Promise<string> {
try {
const outPath = computeArtifactDownloadPath(this.downloadDir, pr, sha, artifactPath);
const downloadExists = await new Promise(resolve => fs.exists(outPath, exists => resolve(exists)));
if (!downloadExists) {
const url = await this.api.getBuildArtifactUrl(buildNum, artifactPath);
const response = await fetch(url, {size: this.downloadSizeLimit});
if (response.status !== 200) {
throw new PreviewServerError(response.status, `Error ${response.status} - ${response.statusText}`);
}
const buffer = await response.buffer();
mkdir('-p', dirname(outPath));
await promisify(fs.writeFile)(outPath, buffer);
}
return outPath;
} catch (error) {
this.logger.warn(error);
const status = (error.type === 'max-size') ? 413 : 500;
throw new PreviewServerError(status, `CircleCI artifact download failed (${error.message || error})`);
}
}
}
function getPrfromBranch(branch: string): number {
// CircleCI only exposes PR numbers via the `branch` field :-(
const match = /^pull\/(\d+)$/.exec(branch);
if (!match) {
throw new Error(`No PR found in branch field: ${branch}`);
}
return +match[1];
}
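A hypothetical usage sketch of BuildRetriever, mirroring the wiring in preview-server-factory.ts further down. Illustrative only; the token, build number, size limit and paths are placeholder values taken from the defaults above.

import {CircleCiApi} from '../common/circle-ci-api';
import {BuildRetriever} from './build-retriever';

async function fetchPreviewArtifact(circleCiToken: string): Promise<string> {
  const api = new CircleCiApi('angular', 'angular', circleCiToken);
  const retriever = new BuildRetriever(api, 20971520, '/tmp/aio-downloads');
  // CircleCI reports PR builds on a branch of the form 'pull/<PR>', which getGithubInfo() maps to a PR number.
  const {pr, sha} = await retriever.getGithubInfo(12345);   // placeholder build number
  return retriever.downloadBuildArtifact(12345, pr, sha, 'aio/dist/aio-snapshot.tgz');
}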

View File

@@ -0,0 +1,46 @@
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
import {GithubTeams} from '../common/github-teams';
import {assertNotMissingOrEmpty} from '../common/utils';
/**
* A helper to verify whether builds are trusted.
*/
export class BuildVerifier {
/**
* Construct a new BuildVerifier instance.
* @param prs A helper to access PR information.
* @param teams A helper to access Github team information.
* @param allowedTeamSlugs The teams that are trusted.
* @param trustedPrLabel The github label that indicates that a PR is trusted.
*/
constructor(protected prs: GithubPullRequests, protected teams: GithubTeams,
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
}
/**
* Check whether a PR contains files that are significant to the build.
* @param pr The number of the PR to check
* @param significantFilePattern A regex that selects files that are significant.
*/
public async getSignificantFilesChanged(pr: number, significantFilePattern: RegExp): Promise<boolean> {
const files = await this.prs.fetchFiles(pr);
return files.some(file => significantFilePattern.test(file.filename));
}
/**
* Check whether a PR is trusted.
* @param pr The number of the PR to check.
* @returns true if the PR is trusted.
*/
public async getPrIsTrusted(pr: number): Promise<boolean> {
const prInfo = await this.prs.fetch(pr);
return this.hasLabel(prInfo, this.trustedPrLabel) ||
(await this.teams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
}
protected hasLabel(prInfo: PullRequest, label: string): boolean {
return prInfo.labels.some(labelObj => labelObj.name === label);
}
}
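A sketch of how the verifier could be wired up, assuming the team slugs, trusted label and significant-files pattern from the Dockerfile above. This is illustrative only and not the project's actual wiring; import paths and org/repo values are assumptions.

import {GithubApi} from '../common/github-api';
import {GithubPullRequests} from '../common/github-pull-requests';
import {GithubTeams} from '../common/github-teams';
import {BuildVerifier} from './build-verifier';

async function shouldDeployPreview(githubToken: string, pr: number): Promise<boolean> {
  const api = new GithubApi(githubToken);
  const prs = new GithubPullRequests(api, 'angular', 'angular');
  const teams = new GithubTeams(api, 'angular');
  const verifier = new BuildVerifier(prs, teams, ['team', 'aio-contributors'], 'aio: preview');
  // Same pattern as AIO_SIGNIFICANT_FILES_PATTERN: files under aio/ or packages/, excluding spec files.
  const significantPattern = /^(?:aio|packages)\/(?!.*[._]spec\.[jt]s$)/;
  return verifier.getSignificantFilesChanged(pr, significantPattern);
}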

View File

@@ -0,0 +1,41 @@
// Imports
import {AIO_DOWNLOADS_DIR} from '../common/constants';
import {
AIO_ARTIFACT_MAX_SIZE,
AIO_ARTIFACT_PATH,
AIO_BUILDS_DIR,
AIO_CIRCLE_CI_TOKEN,
AIO_DOMAIN_NAME,
AIO_GITHUB_ORGANIZATION,
AIO_GITHUB_REPO,
AIO_GITHUB_TEAM_SLUGS,
AIO_GITHUB_TOKEN,
AIO_PREVIEW_SERVER_HOSTNAME,
AIO_PREVIEW_SERVER_PORT,
AIO_SIGNIFICANT_FILES_PATTERN,
AIO_TRUSTED_PR_LABEL,
} from '../common/env-variables';
import {PreviewServerFactory} from './preview-server-factory';
// Run
_main();
// Functions
function _main(): void {
PreviewServerFactory
.create({
buildArtifactPath: AIO_ARTIFACT_PATH,
buildsDir: AIO_BUILDS_DIR,
circleCiToken: AIO_CIRCLE_CI_TOKEN,
domainName: AIO_DOMAIN_NAME,
downloadSizeLimit: AIO_ARTIFACT_MAX_SIZE,
downloadsDir: AIO_DOWNLOADS_DIR,
githubOrg: AIO_GITHUB_ORGANIZATION,
githubRepo: AIO_GITHUB_REPO,
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
githubToken: AIO_GITHUB_TOKEN,
significantFilesPattern: AIO_SIGNIFICANT_FILES_PATTERN,
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
})
.listen(AIO_PREVIEW_SERVER_PORT, AIO_PREVIEW_SERVER_HOSTNAME);
}

View File

@@ -1,8 +1,8 @@
 // Classes
-export class UploadError extends Error {
+export class PreviewServerError extends Error {
   // Constructor
   constructor(public status: number = 500, message?: string) {
     super(message);
-    Object.setPrototypeOf(this, UploadError.prototype);
+    Object.setPrototypeOf(this, PreviewServerError.prototype);
   }
 }

View File

@@ -0,0 +1,183 @@
// Imports
import * as bodyParser from 'body-parser';
import * as express from 'express';
import * as http from 'http';
import {CircleCiApi} from '../common/circle-ci-api';
import {GithubApi} from '../common/github-api';
import {GithubPullRequests} from '../common/github-pull-requests';
import {GithubTeams} from '../common/github-teams';
import {assert, assertNotMissingOrEmpty, createLogger} from '../common/utils';
import {BuildCreator} from './build-creator';
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
import {BuildRetriever} from './build-retriever';
import {BuildVerifier} from './build-verifier';
import {respondWithError, throwRequestError} from './utils';
const AIO_PREVIEW_JOB = 'aio_preview';
// Interfaces - Types
export interface PreviewServerConfig {
downloadsDir: string;
downloadSizeLimit: number;
buildArtifactPath: string;
buildsDir: string;
domainName: string;
githubOrg: string;
githubRepo: string;
githubTeamSlugs: string[];
circleCiToken: string;
githubToken: string;
significantFilesPattern: string;
trustedPrLabel: string;
}
const logger = createLogger('PreviewServer');
// Classes
export class PreviewServerFactory {
// Methods - Public
public static create(cfg: PreviewServerConfig): http.Server {
assertNotMissingOrEmpty('domainName', cfg.domainName);
const circleCiApi = new CircleCiApi(cfg.githubOrg, cfg.githubRepo, cfg.circleCiToken);
const githubApi = new GithubApi(cfg.githubToken);
const prs = new GithubPullRequests(githubApi, cfg.githubOrg, cfg.githubRepo);
const teams = new GithubTeams(githubApi, cfg.githubOrg);
const buildRetriever = new BuildRetriever(circleCiApi, cfg.downloadSizeLimit, cfg.downloadsDir);
const buildVerifier = new BuildVerifier(prs, teams, cfg.githubTeamSlugs, cfg.trustedPrLabel);
const buildCreator = PreviewServerFactory.createBuildCreator(prs, cfg.buildsDir, cfg.domainName);
const middleware = PreviewServerFactory.createMiddleware(buildRetriever, buildVerifier, buildCreator, cfg);
const httpServer = http.createServer(middleware as any);
httpServer.on('listening', () => {
const info = httpServer.address();
logger.info(`Up and running (and listening on ${info.address}:${info.port})...`);
});
return httpServer;
}
public static createMiddleware(buildRetriever: BuildRetriever, buildVerifier: BuildVerifier,
buildCreator: BuildCreator, cfg: PreviewServerConfig): express.Express {
const middleware = express();
const jsonParser = bodyParser.json();
// RESPOND TO IS-ALIVE PING
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
// CIRCLE_CI BUILD COMPLETE WEBHOOK
middleware.post(/^\/circle-build\/?$/, jsonParser, async (req, res) => {
try {
if (!(
req.is('json') &&
req.body &&
req.body.payload &&
req.body.payload.build_num > 0 &&
req.body.payload.build_parameters &&
req.body.payload.build_parameters.CIRCLE_JOB
)) {
throwRequestError(400, `Incorrect body content. Expected JSON`, req);
}
const job = req.body.payload.build_parameters.CIRCLE_JOB;
const buildNum = req.body.payload.build_num;
logger.log(`Build:${buildNum}, Job:${job} - processing web-hook trigger`);
if (job !== AIO_PREVIEW_JOB) {
res.sendStatus(204);
logger.log(`Build:${buildNum}, Job:${job} -`,
`Skipping preview processing because this is not the "${AIO_PREVIEW_JOB}" job.`);
return;
}
const { pr, sha, org, repo, success } = await buildRetriever.getGithubInfo(buildNum);
if (!success) {
res.sendStatus(204);
logger.log(`PR:${pr}, Build:${buildNum} - Skipping preview processing because this build did not succeed.`);
return;
}
assert(cfg.githubOrg === org,
`Invalid webhook: expected "githubOrg" property to equal "${cfg.githubOrg}" but got "${org}".`);
assert(cfg.githubRepo === repo,
`Invalid webhook: expected "githubRepo" property to equal "${cfg.githubRepo}" but got "${repo}".`);
// Do not deploy unless this PR has touched relevant files: `aio/` or `packages/` (except for spec files)
if (!await buildVerifier.getSignificantFilesChanged(pr, new RegExp(cfg.significantFilesPattern))) {
res.sendStatus(204);
logger.log(`PR:${pr}, Build:${buildNum} - ` +
`Skipping preview processing because this PR did not touch any significant files.`);
return;
}
const artifactPath = await buildRetriever.downloadBuildArtifact(buildNum, pr, sha, cfg.buildArtifactPath);
const isPublic = await buildVerifier.getPrIsTrusted(pr);
await buildCreator.create(pr, sha, artifactPath, isPublic);
res.sendStatus(isPublic ? 201 : 202);
} catch (err) {
logger.error('CircleCI webhook error', err);
respondWithError(res, err);
}
});
// GITHUB PR UPDATED WEBHOOK
middleware.post(/^\/pr-updated\/?$/, jsonParser, async (req, res) => {
const { action, number: prNo }: { action?: string, number?: number } = req.body;
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
try {
if (!visMayHaveChanged) {
res.sendStatus(200);
} else if (!prNo) {
throwRequestError(400, `Missing or empty 'number' field`, req);
} else {
const isPublic = await buildVerifier.getPrIsTrusted(prNo);
await buildCreator.updatePrVisibility(prNo, isPublic);
res.sendStatus(200);
}
} catch (err) {
logger.error('PR update hook error', err);
respondWithError(res, err);
}
});
// ALL OTHER REQUESTS
middleware.all('*', req => throwRequestError(404, 'Unknown resource', req));
middleware.use((err: any, _req: any, res: express.Response, _next: any) => {
const statusText = http.STATUS_CODES[err.status] || '???';
logger.error(`Preview server error: ${err.status} - ${statusText}:`, err.message);
respondWithError(res, err);
});
return middleware;
}
public static createBuildCreator(prs: GithubPullRequests, buildsDir: string, domainName: string): BuildCreator {
const buildCreator = new BuildCreator(buildsDir);
const postPreviewsComment = (pr: number, shas: string[]) => {
const body = shas.
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
join('\n');
return prs.addComment(pr, body);
};
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
if (isPublic) {
postPreviewsComment(pr, [sha]);
}
});
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
if (isPublic && shas.length) {
postPreviewsComment(pr, shas);
}
});
return buildCreator;
}
}
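As an aside on the significant-files check in createMiddleware above: the actual pattern is injected via cfg.significantFilesPattern and is not part of this diff, so the snippet below uses a hypothetical pattern purely to illustrate how the RegExp gate behaves.
// Hypothetical pattern (illustration only); the real value is supplied through configuration.
const significantFilesPattern = '^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)';
const pattern = new RegExp(significantFilesPattern);
const changedFiles = [
  'packages/core/src/render3/index.ts',   // significant
  'packages/core/test/di_spec.ts',        // ignored: spec file
  'tools/gulp-tasks/format.js',           // ignored: outside aio/ and packages/
];
const shouldDeploy = changedFiles.some(file => pattern.test(file));  // true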

View File

@ -0,0 +1,29 @@
import * as express from 'express';
import {promisify} from 'util';
import {PreviewServerError} from './preview-error';
/**
* Update the response to report that an error has occurred.
* @param res The response to configure as an error.
* @param err The error that needs to be reported.
*/
export async function respondWithError(res: express.Response, err: any): Promise<void> {
if (!(err instanceof PreviewServerError)) {
err = new PreviewServerError(500, String((err && err.message) || err));
}
res.status(err.status);
await promisify(res.end.bind(res))(err.message);
}
/**
* Throw an exception that describes the given error information.
* @param status The HTTP status code include in the error.
* @param error The error message to include in the error.
* @param req The request that triggered this error.
*/
export function throwRequestError(status: number, error: string, req: express.Request): never {
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
throw new PreviewServerError(status, message);
}
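A minimal usage sketch for the two helpers above, assuming an Express app (the route and field names are hypothetical):
import * as bodyParser from 'body-parser';
import * as express from 'express';
import {respondWithError, throwRequestError} from './utils';

const app = express();
app.post('/example', bodyParser.json(), async (req, res) => {
  try {
    if (!req.body || !req.body.number) {
      // Produces a PreviewServerError(400) whose message includes the request method and URL.
      throwRequestError(400, `Missing or empty 'number' field`, req);
    }
    res.sendStatus(200);
  } catch (err) {
    // Falls back to a PreviewServerError(500) for unexpected errors and ends the response.
    await respondWithError(res, err);
  }
});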

View File

@ -1,87 +0,0 @@
// Imports
import * as jwt from 'jsonwebtoken';
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
import {GithubTeams} from '../common/github-teams';
import {assertNotMissingOrEmpty} from '../common/utils';
import {UploadError} from './upload-error';
// Interfaces - Types
interface JwtPayload {
slug: string;
'pull-request': number;
}
// Enums
export enum BUILD_VERIFICATION_STATUS {
verifiedAndTrusted,
verifiedNotTrusted,
}
// Classes
export class BuildVerifier {
// Properties - Protected
protected githubPullRequests: GithubPullRequests;
protected githubTeams: GithubTeams;
// Constructor
constructor(protected secret: string, githubToken: string, protected repoSlug: string, organization: string,
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
assertNotMissingOrEmpty('secret', secret);
assertNotMissingOrEmpty('githubToken', githubToken);
assertNotMissingOrEmpty('repoSlug', repoSlug);
assertNotMissingOrEmpty('organization', organization);
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
this.githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
this.githubTeams = new GithubTeams(githubToken, organization);
}
// Methods - Public
public getPrIsTrusted(pr: number): Promise<boolean> {
return Promise.resolve().
then(() => this.githubPullRequests.fetch(pr)).
then(prInfo => this.hasLabel(prInfo, this.trustedPrLabel) ||
this.githubTeams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
}
public verify(expectedPr: number, authHeader: string): Promise<BUILD_VERIFICATION_STATUS> {
return Promise.resolve().
then(() => this.extractJwtString(authHeader)).
then(jwtString => this.verifyJwt(expectedPr, jwtString)).
then(jwtPayload => this.verifyPr(jwtPayload['pull-request'])).
catch(err => { throw new UploadError(403, `Error while verifying upload for PR ${expectedPr}: ${err}`); });
}
// Methods - Protected
protected extractJwtString(input: string): string {
return input.replace(/^token +/i, '');
}
protected hasLabel(prInfo: PullRequest, label: string) {
return prInfo.labels.some(labelObj => labelObj.name === label);
}
protected verifyJwt(expectedPr: number, token: string): Promise<JwtPayload> {
return new Promise((resolve, reject) => {
jwt.verify(token, this.secret, {issuer: 'Travis CI, GmbH'}, (err, payload: JwtPayload) => {
if (err) {
reject(err.message || err);
} else if (payload.slug !== this.repoSlug) {
reject(`jwt slug invalid. expected: ${this.repoSlug}`);
} else if (payload['pull-request'] !== expectedPr) {
reject(`jwt pull-request invalid. expected: ${expectedPr}`);
} else {
resolve(payload);
}
});
});
}
protected verifyPr(pr: number): Promise<BUILD_VERIFICATION_STATUS> {
return this.getPrIsTrusted(pr).
then(isTrusted => Promise.resolve(isTrusted ?
BUILD_VERIFICATION_STATUS.verifiedAndTrusted :
BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
}
}

View File

@ -1,39 +0,0 @@
// Imports
import {getEnvVar} from '../common/utils';
import {BuildVerifier} from './build-verifier';
// Run
_main();
// Functions
function _main() {
const secret = 'unused';
const githubToken = getEnvVar('AIO_GITHUB_TOKEN');
const repoSlug = getEnvVar('AIO_REPO_SLUG');
const organization = getEnvVar('AIO_GITHUB_ORGANIZATION');
const allowedTeamSlugs = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
const trustedPrLabel = getEnvVar('AIO_TRUSTED_PR_LABEL');
const pr = +getEnvVar('AIO_PREVERIFY_PR');
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, organization, allowedTeamSlugs,
trustedPrLabel);
// Exit codes:
// - 0: The PR can be automatically trusted (i.e. author belongs to trusted team or PR has the "trusted PR" label).
// - 1: An error occurred.
// - 2: The PR cannot be automatically trusted.
buildVerifier.getPrIsTrusted(pr).
then(isTrusted => {
if (!isTrusted) {
console.warn(
`The PR cannot be automatically verified, because it doesn't have the "${trustedPrLabel}" label and the ` +
`the author is not an active member of any of the following teams: ${allowedTeamSlugs.join(', ')}`);
}
process.exit(isTrusted ? 0 : 2);
}).
catch(err => {
console.error(err);
process.exit(1);
});
}
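The removed script above documents an exit-code contract (0: trusted, 1: error, 2: not trusted). A caller consuming such a script could branch on it roughly as follows (the script path is hypothetical):
import {spawnSync} from 'child_process';

const {status} = spawnSync('node', ['check-pr-trusted.js'], {stdio: 'inherit'});
if (status === 0) {
  console.log('PR can be trusted automatically; proceed with the preview.');
} else if (status === 2) {
  console.log('PR cannot be trusted automatically; a manual decision is needed.');
} else {
  throw new Error(`Verification failed (exit code: ${status}).`);
}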

View File

@ -1,34 +0,0 @@
// Imports
import {getEnvVar} from '../common/utils';
import {uploadServerFactory} from './upload-server-factory';
// Constants
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
const AIO_PREVIEW_DEPLOYMENT_TOKEN = getEnvVar('AIO_PREVIEW_DEPLOYMENT_TOKEN');
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
const AIO_UPLOAD_HOSTNAME = getEnvVar('AIO_UPLOAD_HOSTNAME');
const AIO_UPLOAD_PORT = +getEnvVar('AIO_UPLOAD_PORT');
// Run
_main();
// Functions
function _main() {
uploadServerFactory.
create({
buildsDir: AIO_BUILDS_DIR,
domainName: AIO_DOMAIN_NAME,
githubOrganization: AIO_GITHUB_ORGANIZATION,
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
githubToken: AIO_GITHUB_TOKEN,
repoSlug: AIO_REPO_SLUG,
secret: AIO_PREVIEW_DEPLOYMENT_TOKEN,
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
}).
listen(AIO_UPLOAD_PORT, AIO_UPLOAD_HOSTNAME);
}

View File

@ -1,153 +0,0 @@
// Imports
import * as bodyParser from 'body-parser';
import * as express from 'express';
import * as http from 'http';
import {GithubPullRequests} from '../common/github-pull-requests';
import {assertNotMissingOrEmpty} from '../common/utils';
import {BuildCreator} from './build-creator';
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from './build-verifier';
import {UploadError} from './upload-error';
// Constants
const AUTHORIZATION_HEADER = 'AUTHORIZATION';
const X_FILE_HEADER = 'X-FILE';
// Interfaces - Types
interface UploadServerConfig {
buildsDir: string;
domainName: string;
githubOrganization: string;
githubTeamSlugs: string[];
githubToken: string;
repoSlug: string;
secret: string;
trustedPrLabel: string;
}
// Classes
class UploadServerFactory {
// Methods - Public
public create({
buildsDir,
domainName,
githubOrganization,
githubTeamSlugs,
githubToken,
repoSlug,
secret,
trustedPrLabel,
}: UploadServerConfig): http.Server {
assertNotMissingOrEmpty('domainName', domainName);
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, githubOrganization, githubTeamSlugs,
trustedPrLabel);
const buildCreator = this.createBuildCreator(buildsDir, githubToken, repoSlug, domainName);
const middleware = this.createMiddleware(buildVerifier, buildCreator);
const httpServer = http.createServer(middleware as any);
httpServer.on('listening', () => {
const info = httpServer.address();
console.info(`Up and running (and listening on ${info.address}:${info.port})...`);
});
return httpServer;
}
// Methods - Protected
protected createBuildCreator(buildsDir: string, githubToken: string, repoSlug: string,
domainName: string): BuildCreator {
const buildCreator = new BuildCreator(buildsDir);
const githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
const postPreviewsComment = (pr: number, shas: string[]) => {
const body = shas.
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
join('\n');
return githubPullRequests.addComment(pr, body);
};
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
if (isPublic) {
postPreviewsComment(pr, [sha]);
}
});
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
if (isPublic && shas.length) {
postPreviewsComment(pr, shas);
}
});
return buildCreator;
}
protected createMiddleware(buildVerifier: BuildVerifier, buildCreator: BuildCreator): express.Express {
const middleware = express();
const jsonParser = bodyParser.json();
middleware.get(/^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/, (req, res) => {
const pr = req.params[0];
const sha = req.params[1];
const archive = req.header(X_FILE_HEADER);
const authHeader = req.header(AUTHORIZATION_HEADER);
if (!authHeader) {
this.throwRequestError(401, `Missing or empty '${AUTHORIZATION_HEADER}' header`, req);
} else if (!archive) {
this.throwRequestError(400, `Missing or empty '${X_FILE_HEADER}' header`, req);
} else {
Promise.resolve().
then(() => buildVerifier.verify(+pr, authHeader)).
then(verStatus => verStatus === BUILD_VERIFICATION_STATUS.verifiedAndTrusted).
then(isPublic => buildCreator.create(pr, sha, archive, isPublic).
then(() => res.sendStatus(isPublic ? 201 : 202))).
catch(err => this.respondWithError(res, err));
}
});
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
middleware.post(/^\/pr-updated\/?$/, jsonParser, (req, res) => {
const {action, number: prNo}: {action?: string, number?: number} = req.body;
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
if (!visMayHaveChanged) {
res.sendStatus(200);
} else if (!prNo) {
this.throwRequestError(400, `Missing or empty 'number' field`, req);
} else {
Promise.resolve().
then(() => buildVerifier.getPrIsTrusted(prNo)).
then(isPublic => buildCreator.updatePrVisibility(String(prNo), isPublic)).
then(() => res.sendStatus(200)).
catch(err => this.respondWithError(res, err));
}
});
middleware.all('*', req => this.throwRequestError(404, 'Unknown resource', req));
middleware.use((err: any, _req: any, res: express.Response, _next: any) => this.respondWithError(res, err));
return middleware;
}
protected respondWithError(res: express.Response, err: any) {
if (!(err instanceof UploadError)) {
err = new UploadError(500, String((err && err.message) || err));
}
const statusText = http.STATUS_CODES[err.status] || '???';
console.error(`Upload error: ${err.status} - ${statusText}`);
console.error(err.message);
res.status(err.status).end(err.message);
}
protected throwRequestError(status: number, error: string, req: express.Request) {
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
throw new UploadError(status, message);
}
}
// Exports
export const uploadServerFactory = new UploadServerFactory();

View File

@ -1,16 +1,37 @@
-// Using the values below, we can fake the response of the corresponding methods in tests. This is
-// necessary, because the test upload-server will be running as a separate node process, so we will
-// not have direct access to the code (e.g. for mocking).
-// (See also 'lib/verify-setup/start-test-upload-server.ts'.)
-/* tslint:disable: variable-name */
-// Special values to be used as `authHeader` in `BuildVerifier#verify()`.
-export const BV_verify_error = 'FAKE_VERIFICATION_ERROR';
-export const BV_verify_verifiedNotTrusted = 'FAKE_VERIFIED_NOT_TRUSTED';
-// Special values to be used as `pr` in `BuildVerifier#getPrIsTrusted()`.
-export const BV_getPrIsTrusted_error = 32203;
-export const BV_getPrIsTrusted_notTrusted = 72457;
-/* tslint:enable: variable-name */
+export const enum BuildNums {
+BUILD_INFO_ERROR = 1,
+BUILD_INFO_404,
+BUILD_INFO_BUILD_FAILED,
+BUILD_INFO_INVALID_GH_ORG,
+BUILD_INFO_INVALID_GH_REPO,
+CHANGED_FILES_ERROR,
+CHANGED_FILES_404,
+CHANGED_FILES_NONE,
+BUILD_ARTIFACTS_ERROR,
+BUILD_ARTIFACTS_404,
+BUILD_ARTIFACTS_EMPTY,
+BUILD_ARTIFACTS_MISSING,
+DOWNLOAD_ARTIFACT_ERROR,
+DOWNLOAD_ARTIFACT_404,
+DOWNLOAD_ARTIFACT_TOO_BIG,
+TRUST_CHECK_ERROR,
+TRUST_CHECK_UNTRUSTED,
+TRUST_CHECK_TRUSTED_LABEL,
+TRUST_CHECK_ACTIVE_TRUSTED_USER,
+TRUST_CHECK_INACTIVE_TRUSTED_USER,
+}
+export const enum PrNums {
+CHANGED_FILES_ERROR = 1,
+CHANGED_FILES_404,
+CHANGED_FILES_NONE,
+TRUST_CHECK_ERROR,
+TRUST_CHECK_UNTRUSTED,
+TRUST_CHECK_TRUSTED_LABEL,
+TRUST_CHECK_ACTIVE_TRUSTED_USER,
+TRUST_CHECK_INACTIVE_TRUSTED_USER,
+}
+export const SHA = '1234567890'.repeat(4);
+export const ALT_SHA = 'abcde'.repeat(8);
+export const SIMILAR_SHA = SHA.slice(0, -1) + 'A';

View File

@ -0,0 +1,10 @@
declare module 'delete-empty' {
interface Options {
dryRun: boolean;
verbose: boolean;
filter: (filePath: string) => boolean;
}
export default function deleteEmpty(cwd: string, options?: Options): Promise<string[]>;
export default function deleteEmpty(cwd: string, options?: Options, callback?: (err: any, deleted: string[]) => void): void;
export function sync(cwd: string, options?: Options): string[];
}
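A brief usage sketch for this ambient declaration (the directory path is hypothetical):
import {sync as deleteEmptySync} from 'delete-empty';

// Removes empty directories under the given root and returns the paths that were deleted.
const deleted = deleteEmptySync('/tmp/aio-preview-scratch');
console.log(`Deleted ${deleted.length} empty directories.`);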

View File

@ -4,18 +4,14 @@ import * as fs from 'fs';
 import * as http from 'http';
 import * as path from 'path';
 import * as shell from 'shelljs';
-import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
-import {getEnvVar} from '../common/utils';
-// Constans
-const TEST_AIO_BUILDS_DIR = getEnvVar('TEST_AIO_BUILDS_DIR');
-const TEST_AIO_NGINX_HOSTNAME = getEnvVar('TEST_AIO_NGINX_HOSTNAME');
-const TEST_AIO_NGINX_PORT_HTTP = +getEnvVar('TEST_AIO_NGINX_PORT_HTTP');
-const TEST_AIO_NGINX_PORT_HTTPS = +getEnvVar('TEST_AIO_NGINX_PORT_HTTPS');
-const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
-const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
-const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
-const WWW_USER = getEnvVar('AIO_WWW_USER');
+import {AIO_DOWNLOADS_DIR, HIDDEN_DIR_PREFIX} from '../common/constants';
+import {
+AIO_BUILDS_DIR,
+AIO_NGINX_PORT_HTTP,
+AIO_NGINX_PORT_HTTPS,
+AIO_WWW_USER,
+} from '../common/env-variables';
+import {computeShortSha, createLogger} from '../common/utils';
 // Interfaces - Types
 export interface CmdResult { success: boolean; err: Error | null; stdout: string; stderr: string; }
@ -27,61 +23,50 @@ export type VerifyCmdResultFn = (result: CmdResult) => void;
 // Classes
 class Helper {
-// Properties - Public
-public get buildsDir() { return TEST_AIO_BUILDS_DIR; }
-public get nginxHostname() { return TEST_AIO_NGINX_HOSTNAME; }
-public get nginxPortHttp() { return TEST_AIO_NGINX_PORT_HTTP; }
-public get nginxPortHttps() { return TEST_AIO_NGINX_PORT_HTTPS; }
-public get uploadHostname() { return TEST_AIO_UPLOAD_HOSTNAME; }
-public get uploadPort() { return TEST_AIO_UPLOAD_PORT; }
-public get uploadMaxSize() { return TEST_AIO_UPLOAD_MAX_SIZE; }
-public get wwwUser() { return WWW_USER; }
 // Properties - Protected
 protected cleanUpFns: CleanUpFn[] = [];
 protected portPerScheme: {[scheme: string]: number} = {
-http: this.nginxPortHttp,
-https: this.nginxPortHttps,
+http: AIO_NGINX_PORT_HTTP,
+https: AIO_NGINX_PORT_HTTPS,
 };
+private logger = createLogger('TestHelper');
 // Constructor
 constructor() {
-shell.mkdir('-p', this.buildsDir);
-shell.exec(`chown -R ${this.wwwUser} ${this.buildsDir}`);
+shell.mkdir('-p', AIO_BUILDS_DIR);
+shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_BUILDS_DIR}`);
+shell.mkdir('-p', AIO_DOWNLOADS_DIR);
+shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_DOWNLOADS_DIR}`);
 }
 // Methods - Public
-public buildExists(pr: string, sha = '', isPublic = true, legacy = false): boolean {
-const prDir = this.getPrDir(pr, isPublic);
-const dir = !sha ? prDir : this.getShaDir(prDir, sha, legacy);
-return fs.existsSync(dir);
-}
-public cleanUp() {
+public cleanUp(): void {
 while (this.cleanUpFns.length) {
 // Clean-up fns remove themselves from the list.
 this.cleanUpFns[0]();
 }
-if (fs.readdirSync(this.buildsDir).length) {
-throw new Error(`Directory '${this.buildsDir}' is not empty after clean-up.`);
+const leftoverDownloads = fs.readdirSync(AIO_DOWNLOADS_DIR);
+const leftoverBuilds = fs.readdirSync(AIO_BUILDS_DIR);
+if (leftoverDownloads.length) {
+this.logger.log(`Downloads directory '${AIO_DOWNLOADS_DIR}' is not empty after clean-up.`, leftoverDownloads);
+shell.rm('-rf', `${AIO_DOWNLOADS_DIR}/*`);
+}
+if (leftoverBuilds.length) {
+this.logger.log(`Builds directory '${AIO_BUILDS_DIR}' is not empty after clean-up.`, leftoverBuilds);
+shell.rm('-rf', `${AIO_BUILDS_DIR}/*`);
+}
+if (leftoverBuilds.length || leftoverDownloads.length) {
+throw new Error(`Unexpected test files not cleaned up.`);
 }
 }
-public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
-const inputDir = this.getShaDir(this.getPrDir(`uploaded/${pr}`, true), sha);
-const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
-const cmd2 = `chown ${this.wwwUser} ${archivePath}`;
-const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true, true);
-shell.exec(cmd1);
-shell.exec(cmd2);
-cleanUpTemp();
-return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
-}
-public createDummyBuild(pr: string, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
+public createDummyBuild(pr: number, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
 const prDir = this.getPrDir(pr, isPublic);
 const shaDir = this.getShaDir(prDir, sha, legacy);
 const idxPath = path.join(shaDir, 'index.html');
@ -89,34 +74,21 @@ class Helper {
 this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
 this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
-shell.exec(`chown -R ${this.wwwUser} ${prDir}`);
+shell.exec(`chown -R ${AIO_WWW_USER} ${prDir}`);
 return this.createCleanUpFn(() => shell.rm('-rf', prDir));
 }
-public deletePrDir(pr: string, isPublic = true) {
-const prDir = this.getPrDir(pr, isPublic);
-if (fs.existsSync(prDir)) {
-shell.chmod('-R', 'a+w', prDir);
-shell.rm('-rf', prDir);
-}
-}
-public getPrDir(pr: string, isPublic: boolean): string {
-const prDirName = isPublic ? pr : HIDDEN_DIR_PREFIX + pr;
-return path.join(this.buildsDir, prDirName);
+public getPrDir(pr: number, isPublic: boolean): string {
+const prDirName = isPublic ? '' + pr : HIDDEN_DIR_PREFIX + pr;
+return path.join(AIO_BUILDS_DIR, prDirName);
 }
 public getShaDir(prDir: string, sha: string, legacy = false): string {
-return path.join(prDir, legacy ? sha : this.getShordSha(sha));
+return path.join(prDir, legacy ? sha : computeShortSha(sha));
 }
-public getShordSha(sha: string): string {
-return sha.substr(0, SHORT_SHA_LEN);
-}
-public readBuildFile(pr: string, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
+public readBuildFile(pr: number, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
 const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
 const absFilePath = path.join(shaDir, relFilePath);
 return fs.readFileSync(absFilePath, 'utf8');
@ -129,7 +101,7 @@ class Helper {
 });
 }
-public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory) {
+public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory): void {
 Object.keys(this.portPerScheme).forEach(scheme => suiteFactory(scheme, this.portPerScheme[scheme]));
 }
@ -153,9 +125,9 @@ class Helper {
 // Only keep the last to sections (final headers and body).
 if (!result.success) {
-console.log('Stdout:', result.stdout);
-console.log('Stderr:', result.stderr);
-console.log('Error:', result.err);
+this.logger.log('Stdout:', result.stdout);
+this.logger.error('Stderr:', result.stderr);
+this.logger.error('Error:', result.err);
 }
 expect(result.success).toBe(true);
@ -164,14 +136,14 @@ class Helper {
 };
 }
-public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string, isPublic = true,
-legacy = false): CleanUpFn {
+public writeBuildFile(pr: number, sha: string, relFilePath: string, content: string, isPublic = true,
+legacy = false): void {
 const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
 const absFilePath = path.join(shaDir, relFilePath);
-return this.writeFile(absFilePath, {content}, true);
+this.writeFile(absFilePath, {content}, true);
 }
-public writeFile(filePath: string, {content, size}: FileSpecs, force = false): CleanUpFn {
+public writeFile(filePath: string, {content, size}: FileSpecs, force = false): void {
 if (!force && fs.existsSync(filePath)) {
 throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
 }
@ -189,9 +161,7 @@ class Helper {
 // Create a file with the specified content.
 fs.writeFileSync(filePath, content || '');
 }
-shell.exec(`chown ${this.wwwUser} ${filePath}`);
-return this.createCleanUpFn(() => shell.rm('-rf', cleanUpTarget));
+shell.exec(`chown ${AIO_WWW_USER} ${filePath}`);
 }
 // Methods - Protected
@ -210,5 +180,54 @@ class Helper {
 }
 }
+interface CurlOptions {
+method?: string;
+options?: string;
+data?: any;
+url?: string;
+extraPath?: string;
+}
+export function makeCurl(baseUrl: string) {
+return function curl({
+method = 'POST',
+options = '',
+data = {},
+url = baseUrl,
+extraPath = '',
+}: CurlOptions) {
+const dataString = data ? JSON.stringify(data) : '';
+const cmd = `curl -iLX ${method} ` +
+`${options} ` +
+`--header "Content-Type: application/json" ` +
+`--data '${dataString}' ` +
+`${url}${extraPath}`;
+return helper.runCmd(cmd);
+};
+}
+export interface PayloadData {
+data: {
+payload: {
+build_num: number,
+build_parameters: {
+CIRCLE_JOB: string;
+};
+};
+};
+}
+export function payload(buildNum: number): PayloadData {
+return {
+data: {
+payload: {
+build_num: buildNum,
+build_parameters: { CIRCLE_JOB: 'aio_preview' },
+},
+},
+};
+}
 // Exports
 export const helper = new Helper();
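A short sketch of how the new makeCurl and payload helpers combine in a spec; the URL is hypothetical, and the expected status simply mirrors how build number 1 (BUILD_INFO_ERROR) is mocked in the preview-server tests below.
import {helper as h, makeCurl, payload} from './helper';

// Bind curl to a base URL once, then POST fake CircleCI webhook bodies per test.
const curl = makeCurl('http://localhost/circle-build');
// The status to verify depends on how the given build number is mocked in the test setup.
curl(payload(1)).then(h.verifyResponse(500));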

View File

@ -0,0 +1,7 @@
declare module jasmine {
interface Matchers {
toExistAsAFile(remove = true): boolean;
toExistAsABuild(remove = true): boolean;
toExistAsAnArtifact(remove = true): boolean;
}
}

View File

@ -0,0 +1,88 @@
import {sync as deleteEmpty} from 'delete-empty';
import {existsSync, unlinkSync} from 'fs';
import {join} from 'path';
import {AIO_DOWNLOADS_DIR} from '../common/constants';
import {computeShortSha} from '../common/utils';
import {SHA} from './constants';
import {helper} from './helper';
function checkFile(filePath: string, remove: boolean): boolean {
const exists = existsSync(filePath);
if (exists && remove) {
// if we expected the file to exist then we remove it to prevent leftover file errors
unlinkSync(filePath);
}
return exists;
}
function getArtifactPath(prNum: number, sha: string = SHA): string {
return `${AIO_DOWNLOADS_DIR}/${prNum}-${computeShortSha(sha)}-aio-snapshot.tgz`;
}
function checkFiles(prNum: number, isPublic: boolean, sha: string, isLegacy: boolean, remove: boolean) {
const files = ['/index.html', '/foo/bar.js'];
const prPath = helper.getPrDir(prNum, isPublic);
const shaPath = helper.getShaDir(prPath, sha, isLegacy);
const existingFiles: string[] = [];
const missingFiles: string[] = [];
files
.map(file => join(shaPath, file))
.forEach(file => (checkFile(file, remove) ? existingFiles : missingFiles).push(file));
deleteEmpty(prPath);
return { existingFiles, missingFiles };
}
class ToExistAsAFile implements jasmine.CustomMatcher {
public compare(actual: string, remove = true): jasmine.CustomMatcherResult {
const pass = checkFile(actual, remove);
return {
message: `Expected file at "${actual}" ${pass ? 'not' : ''} to exist`,
pass,
};
}
}
class ToExistAsAnArtifact implements jasmine.CustomMatcher {
public compare(actual: {prNum: number, sha?: string}, remove = true): jasmine.CustomMatcherResult {
const { prNum, sha = SHA } = actual;
const filePath = getArtifactPath(prNum, sha);
const pass = checkFile(filePath, remove);
return {
message: `Expected artifact "PR:${prNum}, SHA:${sha}, FILE:${filePath}" ${pass ? 'not' : '\b'} to exist`,
pass,
};
}
}
class ToExistAsABuild implements jasmine.CustomMatcher {
public compare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}, remove = true):
jasmine.CustomMatcherResult {
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
const {missingFiles} = checkFiles(prNum, isPublic, sha, isLegacy, remove);
return {
message: `Expected files for build "PR:${prNum}, SHA:${sha}" to exist:\n` +
missingFiles.map(file => ` - ${file}`).join('\n'),
pass: missingFiles.length === 0,
};
}
public negativeCompare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}):
jasmine.CustomMatcherResult {
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
const { existingFiles } = checkFiles(prNum, isPublic, sha, isLegacy, false);
return {
message: `Expected files for build "PR:${prNum}, SHA:${sha}" not to exist:\n` +
existingFiles.map(file => ` - ${file}`).join('\n'),
pass: existingFiles.length === 0,
};
}
}
export const customMatchers = {
toExistAsABuild: () => new ToExistAsABuild(),
toExistAsAFile: () => new ToExistAsAFile(),
toExistAsAnArtifact: () => new ToExistAsAnArtifact(),
};
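How these matchers are registered and used in a spec, mirroring the nginx and preview-server tests below (the PR number and SHA are illustrative):
import {customMatchers} from './jasmine-custom-matchers';

describe('example spec (illustration only)', () => {
  beforeEach(() => jasmine.addMatchers(customMatchers));

  it('verifies that a build was created (and cleans it up by default)', () => {
    expect({prNum: 9, sha: '9'.repeat(40)}).toExistAsABuild();
  });
});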

View File

@ -0,0 +1,171 @@
/* tslint:disable:max-line-length */
import * as nock from 'nock';
import * as tar from 'tar-stream';
import {gzipSync} from 'zlib';
import {createLogger, getEnvVar} from '../common/utils';
import {BuildNums, PrNums, SHA} from './constants';
// We are using the `nock` library to fake responses from REST requests, when testing.
// This is necessary, because the test preview-server runs as a separate node process to
// the test harness, so we do not have direct access to the code (e.g. for mocking).
// (See also 'lib/verify-setup/start-test-preview-server.ts'.)
// Each of the potential requests to an external API (e.g. Github or CircleCI) are mocked
// below and return a suitable response. This is quite complicated to setup since the
// response from, say, CircleCI will affect what request is made to, say, Github.
const logger = createLogger('NOCK');
const log = (...args: any[]) => {
// Filter out non-matching URL checks
if (!/^matching.+: false$/.test(args[0])) {
logger.log(...args);
}
};
const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
const ACTIVE_TRUSTED_USER = 'active-trusted-user';
const INACTIVE_TRUSTED_USER = 'inactive-trusted-user';
const UNTRUSTED_USER = 'untrusted-user';
const BASIC_BUILD_INFO = {
branch: `pull/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
failed: false,
reponame: AIO_GITHUB_REPO,
username: AIO_GITHUB_ORGANIZATION,
vcs_revision: SHA,
};
const ISSUE_INFO_TRUSTED_LABEL = { labels: [{ name: AIO_TRUSTED_PR_LABEL }], user: { login: UNTRUSTED_USER } };
const ISSUE_INFO_ACTIVE_TRUSTED_USER = { labels: [], user: { login: ACTIVE_TRUSTED_USER } };
const ISSUE_INFO_INACTIVE_TRUSTED_USER = { labels: [], user: { login: INACTIVE_TRUSTED_USER } };
const ISSUE_INFO_UNTRUSTED = { labels: [], user: { login: UNTRUSTED_USER } };
const ACTIVE_STATE = { state: 'active' };
const INACTIVE_STATE = { state: 'inactive' };
const TEST_TEAM_INFO = AIO_GITHUB_TEAM_SLUGS.map((slug, index) => ({ slug, id: index }));
const CIRCLE_CI_API_HOST = 'https://circleci.com';
const CIRCLE_CI_TOKEN_PARAM = `circle-token=${AIO_CIRCLE_CI_TOKEN}`;
const ARTIFACT_1 = { path: 'artifact-1', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-1`, _urlPath: '/artifacts/artifact-1' };
const ARTIFACT_2 = { path: 'artifact-2', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-2`, _urlPath: '/artifacts/artifact-2' };
const ARTIFACT_3 = { path: 'artifact-3', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-3`, _urlPath: '/artifacts/artifact-3' };
const ARTIFACT_ERROR = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/error`, _urlPath: '/artifacts/error' };
const ARTIFACT_404 = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/404`, _urlPath: '/artifacts/404' };
const ARTIFACT_VALID_TRUSTED_USER = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/user`, _urlPath: '/artifacts/valid/user' };
const ARTIFACT_VALID_TRUSTED_LABEL = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/label`, _urlPath: '/artifacts/valid/label' };
const ARTIFACT_VALID_UNTRUSTED = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/untrusted`, _urlPath: '/artifacts/valid/untrusted' };
const CIRCLE_CI_BUILD_INFO_URL = `/api/v1.1/project/github/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}`;
const buildInfoUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}?${CIRCLE_CI_TOKEN_PARAM}`;
const buildArtifactsUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}/artifacts?${CIRCLE_CI_TOKEN_PARAM}`;
const buildInfo = (prNum: number) => ({ ...BASIC_BUILD_INFO, branch: `pull/${prNum}` });
const GITHUB_API_HOST = 'https://api.github.com';
const GITHUB_ISSUES_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/issues`;
const GITHUB_PULLS_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/pulls`;
const GITHUB_TEAMS_URL = `/orgs/${AIO_GITHUB_ORGANIZATION}/teams`;
const getIssueUrl = (prNum: number) => `${GITHUB_ISSUES_URL}/${prNum}`;
const getFilesUrl = (prNum: number) => `${GITHUB_PULLS_URL}/${prNum}/files`;
const getCommentUrl = (prNum: number) => `${getIssueUrl(prNum)}/comments`;
const getTeamMembershipUrl = (teamId: number, username: string) => `/teams/${teamId}/memberships/${username}`;
const createArchive = (buildNum: number, prNum: number, sha: string) => {
logger.log('createArchive', buildNum, prNum, sha);
const pack = tar.pack();
pack.entry({name: 'index.html'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /index.html`);
pack.entry({name: 'foo/bar.js'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /foo/bar.js`);
pack.finalize();
const zip = gzipSync(pack.read());
return zip;
};
// Create request scopes
const circleCiApi = nock(CIRCLE_CI_API_HOST).log(log).persist();
const githubApi = nock(GITHUB_API_HOST).log(log).persist().matchHeader('Authorization', `token ${AIO_GITHUB_TOKEN}`);
//////////////////////////////
// GENERAL responses
githubApi.get(GITHUB_TEAMS_URL + '?page=0&per_page=100').reply(200, TEST_TEAM_INFO);
githubApi.post(getCommentUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200);
// BUILD_INFO errors
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_ERROR)).replyWithError('BUILD_INFO_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_404)).reply(404, 'BUILD_INFO_404');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_BUILD_FAILED)).reply(200, { ...BASIC_BUILD_INFO, failed: true });
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_ORG)).reply(200, { ...BASIC_BUILD_INFO, username: 'bad' });
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_REPO)).reply(200, { ...BASIC_BUILD_INFO, reponame: 'bad' });
// CHANGED FILE errors
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_ERROR)).reply(200, buildInfo(PrNums.CHANGED_FILES_ERROR));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_ERROR)).replyWithError('CHANGED_FILES_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_404)).reply(200, buildInfo(PrNums.CHANGED_FILES_404));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_404)).reply(404, 'CHANGED_FILES_404');
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_NONE)).reply(200, buildInfo(PrNums.CHANGED_FILES_NONE));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_NONE)).reply(200, []);
// ARTIFACT URL errors
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).replyWithError('BUILD_ARTIFACTS_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(404, 'BUILD_ARTIFACTS_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, []);
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, [ARTIFACT_1, ARTIFACT_2, ARTIFACT_3]);
// ARTIFACT DOWNLOAD errors
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, [ARTIFACT_ERROR]);
circleCiApi.get(ARTIFACT_ERROR._urlPath).replyWithError(ARTIFACT_ERROR._urlPath);
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, [ARTIFACT_404]);
circleCiApi.get(ARTIFACT_ERROR._urlPath).reply(404, ARTIFACT_ERROR._urlPath);
// TRUST CHECK errors
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ERROR));
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ERROR)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ERROR)).replyWithError('TRUST_CHECK_ERROR');
// ACTIVE TRUSTED USER response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
circleCiApi.get(ARTIFACT_VALID_TRUSTED_USER._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_ACTIVE_TRUSTED_USER);
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
// TRUSTED LABEL response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [ARTIFACT_VALID_TRUSTED_LABEL]);
circleCiApi.get(ARTIFACT_VALID_TRUSTED_LABEL._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_TRUSTED_LABEL, PrNums.TRUST_CHECK_TRUSTED_LABEL, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, ISSUE_INFO_TRUSTED_LABEL);
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
// INACTIVE TRUSTED USER response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_INACTIVE_TRUSTED_USER);
githubApi.get(getTeamMembershipUrl(0, INACTIVE_TRUSTED_USER)).reply(200, INACTIVE_STATE);
// UNTRUSTED reponse
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, buildInfo(PrNums.TRUST_CHECK_UNTRUSTED));
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, [ARTIFACT_VALID_UNTRUSTED]);
circleCiApi.get(ARTIFACT_VALID_UNTRUSTED._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_UNTRUSTED, PrNums.TRUST_CHECK_UNTRUSTED, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, ISSUE_INFO_UNTRUSTED);
githubApi.get(getTeamMembershipUrl(0, UNTRUSTED_USER)).reply(404);
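The pattern used throughout this file, reduced to a minimal self-contained sketch (host, path and body are hypothetical):
import * as nock from 'nock';

// A persisted scope keeps matching across requests, since the test preview-server
// runs in a separate process and may hit the same endpoint several times.
const exampleApi = nock('https://example.test').persist();
exampleApi.get('/builds/42').reply(200, {failed: false});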

View File

@ -1,17 +1,22 @@
 // Imports
 import * as path from 'path';
+import {rm} from 'shelljs';
+import {AIO_BUILDS_DIR, AIO_NGINX_HOSTNAME, AIO_NGINX_PORT_HTTP, AIO_NGINX_PORT_HTTPS} from '../common/env-variables';
+import {computeShortSha} from '../common/utils';
 import {helper as h} from './helper';
+import {customMatchers} from './jasmine-custom-matchers';
 // Tests
 describe(`nginx`, () => {
-beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
+beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
+beforeEach(() => jasmine.addMatchers(customMatchers));
 afterEach(() => h.cleanUp());
 it('should redirect HTTP to HTTPS', done => {
-const httpHost = `${h.nginxHostname}:${h.nginxPortHttp}`;
-const httpsHost = `${h.nginxHostname}:${h.nginxPortHttps}`;
+const httpHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTP}`;
+const httpsHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTPS}`;
 const urlMap = {
 [`http://${httpHost}/`]: `https://${httpsHost}/`,
 [`http://${httpHost}/foo`]: `https://${httpsHost}/foo`,
@ -32,13 +37,13 @@ describe(`nginx`, () => {
 h.runForAllSupportedSchemes((scheme, port) => describe(`(on ${scheme.toUpperCase()})`, () => {
-const hostname = h.nginxHostname;
+const hostname = AIO_NGINX_HOSTNAME;
 const host = `${hostname}:${port}`;
-const pr = '9';
+const pr = 9;
 const sha9 = '9'.repeat(40);
 const sha0 = '0'.repeat(40);
-const shortSha9 = h.getShordSha(sha9);
-const shortSha0 = h.getShordSha(sha0);
+const shortSha9 = computeShortSha(sha9);
+const shortSha0 = computeShortSha(sha0);
 describe(`pr<pr>-<sha>.${host}/*`, () => {
@ -50,6 +55,11 @@ describe(`nginx`, () => {
 h.createDummyBuild(pr, sha0);
 });
+afterEach(() => {
+expect({ prNum: pr, sha: sha9 }).toExistAsABuild();
+expect({ prNum: pr, sha: sha0 }).toExistAsABuild();
+});
 it('should return /index.html', done => {
 const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
@ -63,17 +73,19 @@ describe(`nginx`, () => {
 });
-it('should return /index.html (for legacy builds)', done => {
+it('should return /index.html (for legacy builds)', async () => {
 const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
 const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
 h.createDummyBuild(pr, sha9, true, false, true);
-Promise.all([
+await Promise.all([
 h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
 h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
 h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
-]).then(done);
+]);
+expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
 });
@ -86,15 +98,15 @@ describe(`nginx`, () => {
 });
-it('should return /foo/bar.js (for legacy builds)', done => {
+it('should return /foo/bar.js (for legacy builds)', async () => {
 const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
 const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
 h.createDummyBuild(pr, sha9, true, false, true);
-h.runCmd(`curl -iL ${origin}/foo/bar.js`).
-then(h.verifyResponse(200, bodyRegex)).
-then(done);
+await h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(h.verifyResponse(200, bodyRegex));
+expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
 });
@ -126,7 +138,7 @@ describe(`nginx`, () => {
 it('should respond with 404 for unknown PRs/SHAs', done => {
 const otherPr = 54321;
-const otherShortSha = h.getShordSha('8'.repeat(40));
+const otherShortSha = computeShortSha('8'.repeat(40));
 Promise.all([
 h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
@ -174,39 +186,41 @@ describe(`nginx`, () => {
 describe('(for hidden builds)', () => {
-it('should respond with 404 for any file or directory', done => {
+it('should respond with 404 for any file or directory', async () => {
 const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
 const assert404 = h.verifyResponse(404);
 h.createDummyBuild(pr, sha9, false);
-expect(h.buildExists(pr, sha9, false)).toBe(true);
-Promise.all([
+await Promise.all([
 h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
 h.runCmd(`curl -iL ${origin}/`).then(assert404),
 h.runCmd(`curl -iL ${origin}`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
-]).then(done);
+]);
+expect({ prNum: pr, sha: sha9, isPublic: false }).toExistAsABuild();
 });
-it('should respond with 404 for any file or directory (for legacy builds)', done => {
+it('should respond with 404 for any file or directory (for legacy builds)', async () => {
 const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
 const assert404 = h.verifyResponse(404);
 h.createDummyBuild(pr, sha9, false, false, true);
-expect(h.buildExists(pr, sha9, false, true)).toBe(true);
-Promise.all([
+await Promise.all([
 h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
 h.runCmd(`curl -iL ${origin}/`).then(assert404),
 h.runCmd(`curl -iL ${origin}`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
 h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
-]).then(done);
+]);
+expect({ prNum: pr, sha: sha9, isPublic: false, isLegacy: true }).toExistAsABuild();
 });
 });
@ -238,10 +252,10 @@ describe(`nginx`, () => {
 });
-describe(`${host}/create-build/<pr>/<sha>`, () => {
+describe(`${host}/circle-build`, () => {
 it('should disallow non-POST requests', done => {
-const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
+const url = `${scheme}://${host}/circle-build`;
 Promise.all([
 h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
@ -252,31 +266,9 @@ describe(`nginx`, () => {
 });
-it(`should reject files larger than ${h.uploadMaxSize}B (according to header)`, done => {
-const headers = `--header "Content-Length: ${1.5 * h.uploadMaxSize}"`;
-const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
-h.runCmd(`curl -iLX POST ${headers} ${url}`).
-then(h.verifyResponse([413, 'Request Entity Too Large'])).
-then(done);
-});
-it(`should reject files larger than ${h.uploadMaxSize}B (without header)`, done => {
-const filePath = path.join(h.buildsDir, 'snapshot.tar.gz');
-const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
-h.writeFile(filePath, {size: 1.5 * h.uploadMaxSize});
-h.runCmd(`curl -iLX POST --data-binary "@${filePath}" ${url}`).
-then(h.verifyResponse([413, 'Request Entity Too Large'])).
-then(done);
-});
-it('should pass requests through to the upload server', done => {
-h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/${pr}/${sha9}`).
-then(h.verifyResponse(401, /Missing or empty 'AUTHORIZATION' header/)).
+it('should pass requests through to the preview server', done => {
+h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
+then(h.verifyResponse(400, /Incorrect body content. Expected JSON/)).
 then(done);
 });
@ -285,35 +277,16 @@ describe(`nginx`, () => {
 const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
 Promise.all([
-h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/foo/circle-build/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/foo-circle-build/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/fooncircle-build/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/circle-build/foo/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/circle-build-foo/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/circle-buildnfoo/`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/circle-build/pr`).then(h.verifyResponse(404)),
+h.runCmd(`${cmdPrefix}/circle-build/42`).then(h.verifyResponse(404)),
 ]).then(done);
 });
-it('should reject PRs with leading zeros', done => {
-h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/0${pr}/${sha9}`).
-then(h.verifyResponse(404)).
-then(done);
-});
-it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
-const cmdPrefix = `curl -iLX POST ${scheme}://${host}/create-build/${pr}`;
-const bodyRegex = /Missing or empty 'AUTHORIZATION' header/;
-Promise.all([
-h.runCmd(`${cmdPrefix}/0${sha9}`).then(h.verifyResponse(404)),
-h.runCmd(`${cmdPrefix}/${sha0}`).then(h.verifyResponse(401, bodyRegex)),
-]).then(done);
-});
 });
@ -331,17 +304,13 @@ describe(`nginx`, () => {
 });
-it('should pass requests through to the upload server', done => {
+it('should pass requests through to the preview server', done => {
 const cmdPrefix = `curl -iLX POST --header "Content-Type: application/json"`;
 const cmd1 = `${cmdPrefix} ${url}`;
-const cmd2 = `${cmdPrefix} --data '{"number":${pr}}' ${url}`;
-const cmd3 = `${cmdPrefix} --data '{"number":${pr},"action":"foo"}' ${url}`;
 Promise.all([
 h.runCmd(cmd1).then(h.verifyResponse(400, /Missing or empty 'number' field/)),
-h.runCmd(cmd2).then(h.verifyResponse(200)),
-h.runCmd(cmd3).then(h.verifyResponse(200)),
 ]).then(done);
 });
@ -364,13 +333,15 @@ describe(`nginx`, () => {
 describe(`${host}/*`, () => {
-it('should respond with 404 for unknown URLs (even if the resource exists)', done => {
+beforeEach(() => {
 ['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
-const absFilePath = path.join(h.buildsDir, relFilePath);
-h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
+const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
+return h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
 });
+});
-Promise.all([
+it('should respond with 404 for unknown URLs (even if the resource exists)', async () => {
+await Promise.all([
 h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
 h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
 h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
@ -379,7 +350,14 @@ describe(`nginx`, () => {
 h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
 h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
 h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
-]).then(done);
+]);
+});
+afterEach(() => {
+['index.html', 'foo.js', 'foo/index.html', 'foo'].forEach(relFilePath => {
+const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
+rm('-r', absFilePath);
+});
 });
 });

View File

@ -0,0 +1,483 @@
// Imports
import * as fs from 'fs';
import {join} from 'path';
import {AIO_PREVIEW_SERVER_HOSTNAME, AIO_PREVIEW_SERVER_PORT, AIO_WWW_USER} from '../common/env-variables';
import {computeShortSha} from '../common/utils';
import {ALT_SHA, BuildNums, PrNums, SHA, SIMILAR_SHA} from './constants';
import {helper as h, makeCurl, payload} from './helper';
import {customMatchers} from './jasmine-custom-matchers';
// Tests
describe('preview-server', () => {
const hostname = AIO_PREVIEW_SERVER_HOSTNAME;
const port = AIO_PREVIEW_SERVER_PORT;
const host = `http://${hostname}:${port}`;
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
beforeEach(() => jasmine.addMatchers(customMatchers));
afterEach(() => h.cleanUp());
describe(`${host}/circle-build`, () => {
const curl = makeCurl(`${host}/circle-build`);
it('should disallow non-POST requests', async () => {
const bodyRegex = /^Unknown resource/;
await Promise.all([
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
]);
});
it('should respond with 404 for unknown paths', async () => {
await Promise.all([
curl({url: `${host}/foo/circle-build`}).then(h.verifyResponse(404)),
curl({url: `${host}/foo-circle-build`}).then(h.verifyResponse(404)),
curl({url: `${host}/fooncircle-build`}).then(h.verifyResponse(404)),
curl({url: `${host}/circle-build/foo`}).then(h.verifyResponse(404)),
curl({url: `${host}/circle-build-foo`}).then(h.verifyResponse(404)),
curl({url: `${host}/circle-buildnfoo`}).then(h.verifyResponse(404)),
curl({url: `${host}/circle-build/pr`}).then(h.verifyResponse(404)),
curl({url: `${host}/circle-build42`}).then(h.verifyResponse(404)),
]);
});
it('should respond with 400 if the body is not valid', async () => {
await Promise.all([
curl({ data: '' }).then(h.verifyResponse(400)),
curl({ data: {} }).then(h.verifyResponse(400)),
curl({ data: { payload: {} } }).then(h.verifyResponse(400)),
curl({ data: { payload: { build_num: 1 } } }).then(h.verifyResponse(400)),
curl({ data: { payload: { build_num: 1, build_parameters: {} } } }).then(h.verifyResponse(400)),
curl(payload(0)).then(h.verifyResponse(400)),
curl(payload(-1)).then(h.verifyResponse(400)),
]);
});
it('should respond with 500 if the CircleCI API request errors', async () => {
await curl(payload(BuildNums.BUILD_INFO_ERROR)).then(h.verifyResponse(500));
await curl(payload(BuildNums.BUILD_INFO_404)).then(h.verifyResponse(500));
});
it('should respond with 204 if the build on CircleCI failed', async () => {
await curl(payload(BuildNums.BUILD_INFO_BUILD_FAILED)).then(h.verifyResponse(204));
});
it('should respond with 500 if the github org from CircleCI does not match what is configured', async () => {
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_ORG)).then(h.verifyResponse(500));
});
it('should respond with 500 if the github repo from CircleCI does not match what is configured', async () => {
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_REPO)).then(h.verifyResponse(500));
});
it('should respond with 500 if the github files API errors', async () => {
await curl(payload(BuildNums.CHANGED_FILES_ERROR)).then(h.verifyResponse(500));
await curl(payload(BuildNums.CHANGED_FILES_404)).then(h.verifyResponse(500));
});
it('should respond with 204 if no significant files are changed by the PR', async () => {
await curl(payload(BuildNums.CHANGED_FILES_NONE)).then(h.verifyResponse(204));
});
it('should respond with 500 if the CircleCI artifact API fails', async () => {
await curl(payload(BuildNums.BUILD_ARTIFACTS_ERROR)).then(h.verifyResponse(500));
await curl(payload(BuildNums.BUILD_ARTIFACTS_404)).then(h.verifyResponse(500));
await curl(payload(BuildNums.BUILD_ARTIFACTS_EMPTY)).then(h.verifyResponse(500));
await curl(payload(BuildNums.BUILD_ARTIFACTS_MISSING)).then(h.verifyResponse(500));
});
it('should respond with 500 if fetching the artifact errors', async () => {
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).then(h.verifyResponse(500));
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_404)).then(h.verifyResponse(500));
});
it('should respond with 500 if the GH trusted API fails', async () => {
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
});
it('should respond with 201 if a new public build is created', async () => {
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER))
.then(h.verifyResponse(201));
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER }).toExistAsABuild();
});
it('should respond with 202 if a new private build is created', async () => {
await curl(payload(BuildNums.TRUST_CHECK_UNTRUSTED)).then(h.verifyResponse(202));
expect({ prNum: PrNums.TRUST_CHECK_UNTRUSTED, isPublic: false }).toExistAsABuild();
});
[true].forEach(isPublic => {
const build = isPublic ? BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : BuildNums.TRUST_CHECK_UNTRUSTED;
const prNum = isPublic ? PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : PrNums.TRUST_CHECK_UNTRUSTED;
const label = isPublic ? 'public' : 'non-public';
const overwriteRe = RegExp(`^Request to overwrite existing ${label} directory`);
const statusCode = isPublic ? 201 : 202;
describe(`for ${label} builds`, () => {
it('should extract the contents of the build artifact', async () => {
await curl(payload(build))
.then(h.verifyResponse(statusCode));
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic))
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /index.html`);
expect(h.readBuildFile(prNum, SHA, 'foo/bar.js', isPublic))
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /foo/bar.js`);
expect({ prNum, isPublic }).toExistAsABuild();
});
it(`should create files/directories owned by '${AIO_WWW_USER}'`, async () => {
await curl(payload(build))
.then(h.verifyResponse(statusCode));
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
const { stdout: allFiles } = await h.runCmd(`find ${shaDir}`);
const { stdout: userFiles } = await h.runCmd(`find ${shaDir} -user ${AIO_WWW_USER}`);
expect(userFiles).toBe(allFiles);
expect(userFiles).toContain(shaDir);
expect(userFiles).toContain(join(shaDir, 'index.html'));
expect(userFiles).toContain(join(shaDir, 'foo', 'bar.js'));
expect({ prNum, isPublic }).toExistAsABuild();
});
it('should delete the build artifact file', async () => {
await curl(payload(build))
.then(h.verifyResponse(statusCode));
expect({ prNum, SHA }).not.toExistAsAnArtifact();
expect({ prNum, isPublic }).toExistAsABuild();
});
it('should make the build directory non-writable', async () => {
await curl(payload(build))
.then(h.verifyResponse(statusCode));
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
const isNotWritable = (fileOrDir: string) => {
const mode = fs.statSync(fileOrDir).mode;
// tslint:disable-next-line: no-bitwise
return !(mode & parseInt('222', 8));
};
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
expect(isNotWritable(shaDir)).toBe(true);
expect(isNotWritable(join(shaDir, 'index.html'))).toBe(true);
expect(isNotWritable(join(shaDir, 'foo', 'bar.js'))).toBe(true);
expect({ prNum, isPublic }).toExistAsABuild();
});
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)',
async () => {
// It is possible that 40-chars long build directories exist, if they had been deployed
// before implementing the shorter build directory names. In that case, we don't want the
// second (shorter) name to be considered the same as the old one (even if they originate
// from the same SHA).
h.createDummyBuild(prNum, SHA, isPublic, false, true);
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic, true);
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
await curl(payload(build))
.then(h.verifyResponse(statusCode));
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, false)).toContain('index.html');
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
expect({ prNum, isPublic, sha: SHA, isLegacy: false }).toExistAsABuild();
expect({ prNum, isPublic, sha: SHA, isLegacy: true }).toExistAsABuild();
});
it(`should not overwrite existing builds`, async () => {
// setup a build already in place
h.createDummyBuild(prNum, SHA, isPublic);
// distinguish this build from the downloaded one
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic);
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic)).toBe('My content');
expect({ prNum, isPublic }).toExistAsABuild();
expect({ prNum }).toExistAsAnArtifact();
});
it(`should not overwrite existing builds (even if the SHA is different)`, async () => {
// Since only the first few characters of the SHA are used, it is possible for two different
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
// overwrite the first.
expect(SIMILAR_SHA).not.toEqual(SHA);
expect(computeShortSha(SIMILAR_SHA)).toEqual(computeShortSha(SHA));
h.createDummyBuild(prNum, SIMILAR_SHA, isPublic);
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toContain('index.html');
h.writeBuildFile(prNum, SIMILAR_SHA, 'index.html', 'My content', isPublic);
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
expect({ prNum, isPublic, sha: SIMILAR_SHA }).toExistAsABuild();
expect({ prNum, sha: SIMILAR_SHA }).toExistAsAnArtifact();
});
it('should only delete the SHA directory on error (for existing PR)', async () => {
h.createDummyBuild(prNum, ALT_SHA, isPublic);
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
expect({ prNum, isPublic, sha: SHA }).not.toExistAsABuild();
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
});
describe('when the PR\'s visibility has changed', () => {
it('should update the PR\'s visibility', async () => {
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
await curl(payload(build)).then(h.verifyResponse(statusCode));
expect({ prNum, isPublic }).toExistAsABuild();
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
});
it('should not overwrite existing builds (but keep the updated visibility)', async () => {
h.createDummyBuild(prNum, SHA, !isPublic);
await curl(payload(build)).then(h.verifyResponse(409));
expect({ prNum, isPublic }).toExistAsABuild();
expect({ prNum, isPublic: !isPublic }).not.toExistAsABuild();
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
expect({ prNum }).toExistAsAnArtifact();
});
it('should reject the request if it fails to update the PR\'s visibility', async () => {
// One way to cause an error is to have both a public and a hidden directory for the same PR.
h.createDummyBuild(prNum, ALT_SHA, isPublic);
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(prNum, !isPublic)}' ` +
`to existing directory '${h.getPrDir(prNum, isPublic)}'.`);
await curl(payload(build)).then(h.verifyResponse(409, errorRegex));
expect({ prNum, isPublic }).not.toExistAsABuild();
// The bad folders should have been deleted
expect({ prNum, sha: ALT_SHA, isPublic }).toExistAsABuild();
expect({ prNum, sha: ALT_SHA, isPublic: !isPublic }).toExistAsABuild();
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
expect({ prNum }).toExistAsAnArtifact();
});
});
});
});
});
describe(`${host}/health-check`, () => {
it('should respond with 200', done => {
Promise.all([
h.runCmd(`curl -iL ${host}/health-check`).then(h.verifyResponse(200)),
h.runCmd(`curl -iL ${host}/health-check/`).then(h.verifyResponse(200)),
]).then(done);
});
it('should respond with 404 if the path does not match exactly', done => {
Promise.all([
h.runCmd(`curl -iL ${host}/health-check/foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${host}/health-check-foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${host}/health-checknfoo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${host}/foo/health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${host}/foo-health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${host}/foonhealth-check`).then(h.verifyResponse(404)),
]).then(done);
});
});
describe(`${host}/pr-updated`, () => {
const curl = makeCurl(`${host}/pr-updated`);
it('should disallow non-POST requests', async () => {
const bodyRegex = /^Unknown resource in request/;
await Promise.all([
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
]);
});
it('should respond with 400 for requests without a payload', async () => {
const bodyRegex = /^Missing or empty 'number' field in request/;
await Promise.all([
curl({ data: '' }).then(h.verifyResponse(400, bodyRegex)),
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
]);
});
it('should respond with 400 for requests without a \'number\' field', async () => {
const bodyRegex = /^Missing or empty 'number' field in request/;
await Promise.all([
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
curl({ data: { number: null} }).then(h.verifyResponse(400, bodyRegex)),
]);
});
it('should reject requests for which checking the PR visibility fails', async () => {
await curl({ data: { number: PrNums.TRUST_CHECK_ERROR } }).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
});
it('should respond with 404 for unknown paths', done => {
const mockPayload = JSON.stringify({number: 1}); // MockExternalApiFlags.TRUST_CHECK_ACTIVE_TRUSTED_USER });
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" ${host}`;
Promise.all([
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
]).then(done);
});
it('should do nothing if PR\'s visibility is already up-to-date', async () => {
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
const checkVisibilities = (remove: boolean) => {
// Public build is already public.
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild(remove);
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild(remove);
// Hidden build is already hidden.
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild(remove);
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild(remove);
};
h.createDummyBuild(publicPr, SHA, true);
h.createDummyBuild(hiddenPr, SHA, false);
checkVisibilities(false);
await Promise.all([
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
]);
// Visibilities should not have changed, because the specified action could not have triggered a change.
checkVisibilities(true);
});
it('should do nothing if \'action\' implies no visibility change', async () => {
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
const checkVisibilities = (remove: boolean) => {
// Public build is hidden atm.
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(remove);
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(remove);
// Hidden build is public atm.
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(remove);
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(remove);
};
h.createDummyBuild(publicPr, SHA, false);
h.createDummyBuild(hiddenPr, SHA, true);
checkVisibilities(false);
await Promise.all([
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
]);
// Visibilities should not have changed, because the specified action could not have triggered a change.
checkVisibilities(true);
});
describe('when the visibility has changed', () => {
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
beforeEach(() => {
// Create initial PR builds with opposite visibilities as the ones that will be reported:
// - The now public PR was previously hidden.
// - The now hidden PR was previously public.
h.createDummyBuild(publicPr, SHA, false);
h.createDummyBuild(hiddenPr, SHA, true);
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(false);
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(false);
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(false);
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(false);
});
afterEach(() => {
// Expect PRs' visibility to have been updated:
// - The public PR should be actually public (previously it was hidden).
// - The hidden PR should be actually hidden (previously it was public).
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
});
it('should update the PR\'s visibility (action: undefined)', async () => {
await Promise.all([
curl({ data: {number: +publicPr } }).then(h.verifyResponse(200)),
curl({ data: {number: +hiddenPr } }).then(h.verifyResponse(200)),
]);
});
it('should update the PR\'s visibility (action: labeled)', async () => {
await Promise.all([
curl({ data: {number: +publicPr, action: 'labeled' } }).then(h.verifyResponse(200)),
curl({ data: {number: +hiddenPr, action: 'labeled' } }).then(h.verifyResponse(200)),
]);
});
it('should update the PR\'s visibility (action: unlabeled)', async () => {
await Promise.all([
curl({ data: {number: +publicPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
curl({ data: {number: +hiddenPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
]);
});
});
});
describe(`${host}/*`, () => {
it('should respond with 404 for requests to unknown URLs', done => {
const bodyRegex = /^Unknown resource/;
Promise.all([
h.runCmd(`curl -iL ${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL ${host}/`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL ${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PUT ${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX POST ${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PATCH ${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX DELETE ${host}`).then(h.verifyResponse(404, bodyRegex)),
]).then(done);
});
});
});
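The `toExistAsABuild()` and `toExistAsAnArtifact()` assertions used throughout this spec come from the `customMatchers` object registered with `jasmine.addMatchers()` in the `beforeEach()` near the top of the file. Their real implementation lives in `./jasmine-custom-matchers` and is not part of this diff; the sketch below only illustrates the general shape such a matcher takes (the directory layout, the short-SHA length, and the clean-up flag are assumptions, not the actual implementation).

// Hypothetical, simplified matcher in the spirit of `toExistAsABuild()`. The real matcher
// (not shown in this diff) also removes the checked directory when its argument is `true`.
import * as fs from 'fs';
import {join} from 'path';

const BUILDS_DIR = process.env.AIO_BUILDS_DIR || '/var/www/aio-builds';  // assumed location

const customMatchers: jasmine.CustomMatcherFactories = {
  toExistAsABuild: () => ({
    compare(actual: {prNum: number, sha?: string, isPublic?: boolean}) {
      const {prNum, sha = '', isPublic = true} = actual;
      const prDir = join(BUILDS_DIR, `${isPublic ? '' : 'hidden-'}${prNum}`);  // assumed naming
      const buildDir = sha ? join(prDir, sha.slice(0, 7)) : prDir;             // assumed short-SHA length
      const pass = fs.existsSync(join(buildDir, 'index.html'));
      return {pass, message: `Expected build directory '${buildDir}' to${pass ? ' not' : ''} exist.`};
    },
  }),
};

// Registered once per spec, as in the `beforeEach()` above:
beforeEach(() => jasmine.addMatchers(customMatchers));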

View File

@@ -1,101 +1,80 @@
 // Imports
-import * as path from 'path';
-import * as c from './constants';
-import {helper as h} from './helper';
+import {AIO_NGINX_HOSTNAME} from '../common/env-variables';
+import {computeShortSha} from '../common/utils';
+import {ALT_SHA, BuildNums, PrNums, SHA} from './constants';
+import {helper as h, makeCurl, payload} from './helper';
+import {customMatchers} from './jasmine-custom-matchers';

 // Tests
 h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
-  const hostname = h.nginxHostname;
+  const hostname = AIO_NGINX_HOSTNAME;
   const host = `${hostname}:${port}`;
-  const pr9 = '9';
-  const sha9 = '9'.repeat(40);
-  const sha0 = '0'.repeat(40);
-  const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
+  const curlPrUpdated = makeCurl(`${scheme}://${host}/pr-updated`);

-  const getFile = (pr: string, sha: string, file: string) =>
-    h.runCmd(`curl -iL ${scheme}://pr${pr}-${h.getShordSha(sha)}.${host}/${file}`);
-  const uploadBuild = (pr: string, sha: string, archive: string, authHeader = 'Token FOO') => {
-    const curlPost = `curl -iLX POST --header "Authorization: ${authHeader}"`;
-    return h.runCmd(`${curlPost} --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
-  };
-  const prUpdated = (pr: number, action?: string) => {
-    const url = `${scheme}://${host}/pr-updated`;
-    const payloadStr = JSON.stringify({number: pr, action});
-    return h.runCmd(`curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`);
-  };
+  const getFile = (pr: number, sha: string, file: string) =>
+    h.runCmd(`curl -iL ${scheme}://pr${pr}-${computeShortSha(sha)}.${host}/${file}`);
+  const prUpdated = (prNum: number, action?: string) => curlPrUpdated({ data: { number: prNum, action } });
+  const circleBuild = makeCurl(`${scheme}://${host}/circle-build`);

-  beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
-  afterEach(() => {
-    h.deletePrDir(pr9);
-    h.deletePrDir(pr9, false);
-    h.cleanUp();
+  beforeEach(() => {
+    jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;
+    jasmine.addMatchers(customMatchers);
   });
+  afterEach(() => h.cleanUp());

   describe('for a new/non-existing PR', () => {

-    it('should be able to upload and serve a public build', done => {
-      const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+    it('should be able to create and serve a public preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
+      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

-      h.createDummyArchive(pr9, sha9, archivePath);
+      const regexPrefix = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
+      const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
+      const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);

-      uploadBuild(pr9, sha9, archivePath).
-        then(() => Promise.all([
-          getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
-          getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
-        ])).
-        then(done);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
+      await Promise.all([
+        getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
+        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
+      ]);
+
+      expect({ prNum: PR }).toExistAsABuild();
+      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
     });

-    it('should be able to upload but not serve a hidden build', done => {
-      const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+    it('should be able to create but not serve a hidden preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
+      const PR = PrNums.TRUST_CHECK_UNTRUSTED;

-      h.createDummyArchive(pr9, sha9, archivePath);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
+      await Promise.all([
+        getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
+        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
+      ]);

-      uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
-        then(() => Promise.all([
-          getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
-          getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
-        ])).
-        then(() => {
-          expect(h.buildExists(pr9, sha9)).toBe(false);
-          expect(h.buildExists(pr9, sha9, false)).toBe(true);
-          expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
-          expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
-        }).
-        then(done);
+      expect({ prNum: PR }).not.toExistAsABuild();
+      expect({ prNum: PR, isPublic: false }).toExistAsABuild();
     });

-    it('should reject an upload if verification fails', done => {
-      const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
+    it('should reject if verification fails', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_ERROR;
+      const PR = PrNums.TRUST_CHECK_ERROR;

-      h.createDummyArchive(pr9, sha9, archivePath);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(500));

-      uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
-        then(h.verifyResponse(403, errorRegex9)).
-        then(() => {
-          expect(h.buildExists(pr9)).toBe(false);
-          expect(h.buildExists(pr9, '', false)).toBe(false);
-        }).
-        then(done);
+      expect({ prNum: PR }).toExistAsAnArtifact();
+      expect({ prNum: PR }).not.toExistAsABuild();
+      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
     });

-    it('should be able to notify that a PR has been updated (and do nothing)', done => {
-      prUpdated(+pr9).
-        then(h.verifyResponse(200)).
-        then(() => {
-          // The PR should still not exist.
-          expect(h.buildExists(pr9, '', false)).toBe(false);
-          expect(h.buildExists(pr9, '', true)).toBe(false);
-        }).
-        then(done);
+    it('should be able to notify that a PR has been updated (and do nothing)', async () => {
+      await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(200));
+
+      // The PR should still not exist.
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).not.toExistAsABuild();
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).not.toExistAsABuild();
     });

   });

@@ -103,215 +82,186 @@ h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme
   describe('for an existing PR', () => {

-    it('should be able to upload and serve a public build', done => {
-      const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
-      const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
-      const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
+    it('should be able to create and serve a public preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
+      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

-      const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+      const regexPrefix1 = `^PR: ${PR} \\| SHA: ${ALT_SHA} \\| File:`;
+      const idxContentRegex1 = new RegExp(`${regexPrefix1} \\/index\\.html$`);
+      const barContentRegex1 = new RegExp(`${regexPrefix1} \\/foo\\/bar\\.js$`);

-      h.createDummyBuild(pr9, sha0);
-      h.createDummyArchive(pr9, sha9, archivePath);
+      const regexPrefix2 = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
+      const idxContentRegex2 = new RegExp(`${regexPrefix2} \\/index\\.html$`);
+      const barContentRegex2 = new RegExp(`${regexPrefix2} \\/foo\\/bar\\.js$`);

-      uploadBuild(pr9, sha9, archivePath).
-        then(() => Promise.all([
-          getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
-          getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
-          getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
-          getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
-        ])).
-        then(done);
+      h.createDummyBuild(PR, ALT_SHA);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
+      await Promise.all([
+        getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex1)),
+        getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex1)),
+        getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex2)),
+        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex2)),
+      ]);
+
+      expect({ prNum: PR, sha: SHA }).toExistAsABuild();
+      expect({ prNum: PR, sha: ALT_SHA }).toExistAsABuild();
     });

-    it('should be able to upload but not serve a hidden build', done => {
-      const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
-      const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
-      const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
-      const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+    it('should be able to create but not serve a hidden preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
+      const PR = PrNums.TRUST_CHECK_UNTRUSTED;

-      h.createDummyBuild(pr9, sha0, false);
-      h.createDummyArchive(pr9, sha9, archivePath);
+      h.createDummyBuild(PR, ALT_SHA, false);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
+      await Promise.all([
+        getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(404)),
+        getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(404)),
+        getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
+        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
+      ]);

-      uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
-        then(() => Promise.all([
-          getFile(pr9, sha0, 'index.html').then(h.verifyResponse(404)),
-          getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(404)),
-          getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
-          getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
-        ])).
-        then(() => {
-          expect(h.buildExists(pr9, sha9)).toBe(false);
-          expect(h.buildExists(pr9, sha9, false)).toBe(true);
-          expect(h.readBuildFile(pr9, sha0, 'index.html', false)).toMatch(idxContentRegex0);
-          expect(h.readBuildFile(pr9, sha0, 'foo/bar.js', false)).toMatch(barContentRegex0);
-          expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
-          expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
-        }).
-        then(done);
+      expect({ prNum: PR, sha: SHA }).not.toExistAsABuild();
+      expect({ prNum: PR, sha: SHA, isPublic: false }).toExistAsABuild();
+      expect({ prNum: PR, sha: ALT_SHA }).not.toExistAsABuild();
+      expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
     });

-    it('should reject an upload if verification fails', done => {
-      const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
+    it('should reject if verification fails', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_ERROR;
+      const PR = PrNums.TRUST_CHECK_ERROR;

-      h.createDummyBuild(pr9, sha0);
-      h.createDummyArchive(pr9, sha9, archivePath);
+      h.createDummyBuild(PR, ALT_SHA, false);

-      uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
-        then(h.verifyResponse(403, errorRegex9)).
-        then(() => {
-          expect(h.buildExists(pr9)).toBe(true);
-          expect(h.buildExists(pr9, sha0)).toBe(true);
-          expect(h.buildExists(pr9, sha9)).toBe(false);
-        }).
-        then(done);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
+
+      expect({ prNum: PR }).toExistAsAnArtifact();
+      expect({ prNum: PR }).not.toExistAsABuild();
+      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
+      expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
     });

-    it('should not be able to overwrite an existing public build', done => {
-      const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+    it('should not be able to overwrite an existing public preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
+      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

-      h.createDummyBuild(pr9, sha9);
-      h.createDummyArchive(pr9, sha9, archivePath);
+      const regexPrefix = `^PR: ${PR} \\| SHA: ${SHA} \\| File:`;
+      const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
+      const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);

-      uploadBuild(pr9, sha9, archivePath).
-        then(h.verifyResponse(409)).
-        then(() => Promise.all([
-          getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
-          getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
-        ])).
-        then(done);
+      h.createDummyBuild(PR, SHA);
+
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
+      await Promise.all([
+        getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
+        getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
+      ]);
+
+      expect({ prNum: PR }).toExistAsAnArtifact();
+      expect({ prNum: PR }).toExistAsABuild();
     });

-    it('should not be able to overwrite an existing hidden build', done => {
-      const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
-      const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
-      const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
+    it('should not be able to overwrite an existing hidden preview', async () => {
+      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
+      const PR = PrNums.TRUST_CHECK_UNTRUSTED;

-      h.createDummyBuild(pr9, sha9, false);
-      h.createDummyArchive(pr9, sha9, archivePath);
+      h.createDummyBuild(PR, SHA, false);
+      await circleBuild(payload(BUILD)).then(h.verifyResponse(409));

-      uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
-        then(h.verifyResponse(409)).
-        then(() => {
-          expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
-          expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
-        }).
-        then(done);
+      expect({ prNum: PR }).toExistAsAnArtifact();
+      expect({ prNum: PR, isPublic: false }).toExistAsABuild();
     });

-    it('should be able to request re-checking visibility (if outdated)', done => {
-      const publicPr = pr9;
-      const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
+    it('should be able to request re-checking visibility (if outdated)', async () => {
+      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
+      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

-      h.createDummyBuild(publicPr, sha9, false);
-      h.createDummyBuild(hiddenPr, sha9, true);
+      h.createDummyBuild(publicPr, SHA, false);
+      h.createDummyBuild(hiddenPr, SHA, true);

       // PR visibilities are outdated (i.e. the opposte of what the should).
-      expect(h.buildExists(publicPr, '', false)).toBe(true);
-      expect(h.buildExists(publicPr, '', true)).toBe(false);
-      expect(h.buildExists(hiddenPr, '', false)).toBe(false);
-      expect(h.buildExists(hiddenPr, '', true)).toBe(true);
+      expect({ prNum: publicPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
+      expect({ prNum: publicPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
+      expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
+      expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).toExistAsABuild(false);

-      Promise.
-        all([
-          prUpdated(+publicPr).then(h.verifyResponse(200)),
-          prUpdated(+hiddenPr).then(h.verifyResponse(200)),
-        ]).
-        then(() => {
-          // PR visibilities should have been updated.
-          expect(h.buildExists(publicPr, '', false)).toBe(false);
-          expect(h.buildExists(publicPr, '', true)).toBe(true);
-          expect(h.buildExists(hiddenPr, '', false)).toBe(true);
-          expect(h.buildExists(hiddenPr, '', true)).toBe(false);
-        }).
-        then(() => {
-          h.deletePrDir(publicPr, true);
-          h.deletePrDir(hiddenPr, false);
-        }).
-        then(done);
+      await Promise.all([
+        prUpdated(publicPr).then(h.verifyResponse(200)),
+        prUpdated(hiddenPr).then(h.verifyResponse(200)),
+      ]);
+
+      // PR visibilities should have been updated.
+      expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
+      expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
+      expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
+      expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
     });

-    it('should be able to request re-checking visibility (if up-to-date)', done => {
-      const publicPr = pr9;
-      const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
+    it('should be able to request re-checking visibility (if up-to-date)', async () => {
+      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
+      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

-      h.createDummyBuild(publicPr, sha9, true);
-      h.createDummyBuild(hiddenPr, sha9, false);
+      h.createDummyBuild(publicPr, SHA, true);
+      h.createDummyBuild(hiddenPr, SHA, false);

       // PR visibilities are already up-to-date.
-      expect(h.buildExists(publicPr, '', false)).toBe(false);
-      expect(h.buildExists(publicPr, '', true)).toBe(true);
-      expect(h.buildExists(hiddenPr, '', false)).toBe(true);
-      expect(h.buildExists(hiddenPr, '', true)).toBe(false);
+      expect({ prNum: publicPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
+      expect({ prNum: publicPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
+      expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
+      expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);

-      Promise.
-        all([
-          prUpdated(+publicPr).then(h.verifyResponse(200)),
-          prUpdated(+hiddenPr).then(h.verifyResponse(200)),
-        ]).
-        then(() => {
-          // PR visibilities are still up-to-date.
-          expect(h.buildExists(publicPr, '', false)).toBe(false);
-          expect(h.buildExists(publicPr, '', true)).toBe(true);
-          expect(h.buildExists(hiddenPr, '', false)).toBe(true);
-          expect(h.buildExists(hiddenPr, '', true)).toBe(false);
-        }).
-        then(done);
+      await Promise.all([
+        prUpdated(publicPr).then(h.verifyResponse(200)),
+        prUpdated(hiddenPr).then(h.verifyResponse(200)),
+      ]);
+
+      // PR visibilities are still up-to-date.
+      expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
+      expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
+      expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
+      expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
     });

-    it('should reject a request if re-checking visibility fails', done => {
-      const errorPr = String(c.BV_getPrIsTrusted_error);
+    it('should reject a request if re-checking visibility fails', async () => {
+      const errorPr = PrNums.TRUST_CHECK_ERROR;

-      h.createDummyBuild(errorPr, sha9, true);
+      h.createDummyBuild(errorPr, SHA, true);

-      expect(h.buildExists(errorPr, '', false)).toBe(false);
-      expect(h.buildExists(errorPr, '', true)).toBe(true);
+      expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild(false);
+      expect({ prNum: errorPr, isPublic: true }).toExistAsABuild(false);

-      prUpdated(+errorPr).
-        then(h.verifyResponse(500, /Test/)).
-        then(() => {
-          // PR visibility should not have been updated.
-          expect(h.buildExists(errorPr, '', false)).toBe(false);
-          expect(h.buildExists(errorPr, '', true)).toBe(true);
-        }).
-        then(done);
+      await prUpdated(errorPr).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
+
+      // PR visibility should not have been updated.
+      expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild();
+      expect({ prNum: errorPr, isPublic: true }).toExistAsABuild();
     });

-    it('should reject a request if updating visibility fails', done => {
+    it('should reject a request if updating visibility fails', async () => {
       // One way to cause an error is to have both a public and a hidden directory for the same PR.
-      h.createDummyBuild(pr9, sha9, false);
-      h.createDummyBuild(pr9, sha9, true);
+      h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, false);
+      h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, true);

-      const hiddenPrDir = h.getPrDir(pr9, false);
-      const publicPrDir = h.getPrDir(pr9, true);
+      const hiddenPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, false);
+      const publicPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, true);
       const bodyRegex = new RegExp(`Request to move '${hiddenPrDir}' to existing directory '${publicPrDir}'`);

-      expect(h.buildExists(pr9, '', false)).toBe(true);
-      expect(h.buildExists(pr9, '', true)).toBe(true);
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild(false);
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild(false);

-      prUpdated(+pr9).
-        then(h.verifyResponse(409, bodyRegex)).
-        then(() => {
-          // PR visibility should not have been updated.
-          expect(h.buildExists(pr9, '', false)).toBe(true);
-          expect(h.buildExists(pr9, '', true)).toBe(true);
-        }).
-        then(done);
+      await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(409, bodyRegex));
+
+      // PR visibility should not have been updated.
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild();
+      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild();
     });

   });
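Both this integration spec and the preview-server spec above rely on `computeShortSha()` (imported from `../common/utils`) to derive the preview directory name from the full commit SHA, which is also why two different SHAs (`SHA` vs `SIMILAR_SHA`) can map to the same directory in the overwrite tests. The helper's implementation is not part of this diff; below is only a minimal sketch, under the assumption that the short SHA is a fixed-length prefix of the full SHA.

// Sketch only: assumes the short SHA is a fixed-length prefix of the full commit SHA.
export const SHORT_SHA_LEN = 7;
export const computeShortSha = (sha: string) => sha.slice(0, SHORT_SHA_LEN);

// Two SHAs that share their first SHORT_SHA_LEN characters therefore resolve to the same
// preview directory, which is why the specs assert that the second one must not overwrite it.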

View File

@@ -0,0 +1,2 @@
import '../preview-server';
import './mock-external-apis';

View File

@@ -1,38 +0,0 @@
// Imports
import {GithubPullRequests} from '../common/github-pull-requests';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../upload-server/build-verifier';
import {UploadError} from '../upload-server/upload-error';
import * as c from './constants';
// Run
// TODO(gkalpak): Add e2e tests to cover these interactions as well.
GithubPullRequests.prototype.addComment = () => Promise.resolve();
BuildVerifier.prototype.getPrIsTrusted = (pr: number) => {
switch (pr) {
case c.BV_getPrIsTrusted_error:
// For e2e tests, fake an error.
return Promise.reject('Test');
case c.BV_getPrIsTrusted_notTrusted:
// For e2e tests, fake an untrusted PR (`false`).
return Promise.resolve(false);
default:
// For e2e tests, default to trusted PRs (`true`).
return Promise.resolve(true);
}
};
BuildVerifier.prototype.verify = (expectedPr: number, authHeader: string) => {
switch (authHeader) {
case c.BV_verify_error:
// For e2e tests, fake a verification error.
return Promise.reject(new UploadError(403, `Error while verifying upload for PR ${expectedPr}: Test`));
case c.BV_verify_verifiedNotTrusted:
// For e2e tests, fake a `verifiedNotTrusted` verification status.
return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted);
default:
// For e2e tests, default to `verifiedAndTrusted` verification status.
return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);
}
};
// tslint:disable-next-line: no-var-requires
require('../upload-server/index');

View File

@@ -0,0 +1,30 @@
declare module 'tar-stream' {
import {Readable, Writable} from 'stream';
export interface Pack extends Readable {
entry(header: Header, callback?: (err?: any) => {}): Writable;
entry(header: Header, contents: string, callback?: (err?: any) => {}): Writable;
entry(header: Header, buffer: Buffer, callback?: (err?: any) => {}): Writable;
entry(header: Header, buffer: string|Buffer, callback?: (err?: any) => {}): Writable;
finalize();
destroy(err: any);
}
export interface Header {
name: string;
mode?: number;
uid?: number;
gid?: number;
size?: number;
mtime?: Date;
type?: type;
linkname?: string;
uname?: string;
gname?: string;
devmajor?: number;
devminor?: number;
}
export function pack(): Pack;
}
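The declaration above types only the subset of `tar-stream` that these scripts need: the `pack()` API used to assemble tarballs programmatically. For context, a minimal usage sketch of that API (file names and contents here are purely illustrative), producing a gzipped archive of the same shape as the dummy build archives used in the e2e tests:

import * as fs from 'fs';
import {createGzip} from 'zlib';
import {pack} from 'tar-stream';

// Assemble a small tarball in memory, then gzip it to disk.
const tarball = pack();
tarball.entry({name: 'index.html'}, '<h1>PR preview</h1>');
tarball.entry({name: 'foo/bar.js', mode: 0o644}, 'console.log("bar");');
tarball.finalize();

tarball
    .pipe(createGzip())
    .pipe(fs.createWriteStream('snapshot.tar.gz'));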

View File

@@ -1,571 +0,0 @@
// Imports
import * as fs from 'fs';
import * as path from 'path';
import * as c from './constants';
import {CmdResult, helper as h} from './helper';
// Tests
describe('upload-server (on HTTP)', () => {
const hostname = h.uploadHostname;
const port = h.uploadPort;
const host = `${hostname}:${port}`;
const pr = '9';
const sha9 = '9'.repeat(40);
const sha0 = '0'.repeat(40);
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
afterEach(() => h.cleanUp());
describe(`${host}/create-build/<pr>/<sha>`, () => {
const authorizationHeader = `--header "Authorization: Token FOO"`;
const xFileHeader = `--header "X-File: ${h.buildsDir}/snapshot.tar.gz"`;
const defaultHeaders = `${authorizationHeader} ${xFileHeader}`;
const curl = (url: string, headers = defaultHeaders) => `curl -iL ${headers} ${url}`;
it('should disallow non-GET requests', done => {
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = /^Unknown resource/;
Promise.all([
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
]).then(done);
});
it('should reject requests without an \'AUTHORIZATION\' header', done => {
const headers1 = '';
const headers2 = '--header "AUTHORIXATION: "';
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = /^Missing or empty 'AUTHORIZATION' header/;
Promise.all([
h.runCmd(curl(url, headers1)).then(h.verifyResponse(401, bodyRegex)),
h.runCmd(curl(url, headers2)).then(h.verifyResponse(401, bodyRegex)),
]).then(done);
});
it('should reject requests without an \'X-FILE\' header', done => {
const headers1 = authorizationHeader;
const headers2 = `${authorizationHeader} --header "X-FILE: "`;
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = /^Missing or empty 'X-FILE' header/;
Promise.all([
h.runCmd(curl(url, headers1)).then(h.verifyResponse(400, bodyRegex)),
h.runCmd(curl(url, headers2)).then(h.verifyResponse(400, bodyRegex)),
]).then(done);
});
it('should reject requests for which the PR verification fails', done => {
const headers = `--header "Authorization: ${c.BV_verify_error}" ${xFileHeader}`;
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = new RegExp(`Error while verifying upload for PR ${pr}: Test`);
h.runCmd(curl(url, headers)).
then(h.verifyResponse(403, bodyRegex)).
then(done);
});
it('should respond with 404 for unknown paths', done => {
const cmdPrefix = curl(`http://${host}`);
Promise.all([
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
]).then(done);
});
it('should reject PRs with leading zeros', done => {
h.runCmd(curl(`http://${host}/create-build/0${pr}/${sha9}`)).
then(h.verifyResponse(404)).
then(done);
});
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
Promise.all([
h.runCmd(curl(`http://${host}/create-build/${pr}/0${sha9}`)).then(h.verifyResponse(404)),
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha9}`)).then(h.verifyResponse(500)),
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha0}`)).then(h.verifyResponse(500)),
]).then(done);
});
[true, false].forEach(isPublic => describe(`(for ${isPublic ? 'public' : 'hidden'} builds)`, () => {
const authorizationHeader2 = isPublic ?
authorizationHeader : `--header "Authorization: ${c.BV_verify_verifiedNotTrusted}"`;
const cmdPrefix = curl('', `${authorizationHeader2} ${xFileHeader}`);
const overwriteRe = RegExp(`^Request to overwrite existing ${isPublic ? 'public' : 'non-public'} directory`);
it('should not overwrite existing builds', done => {
h.createDummyBuild(pr, sha9, isPublic);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(409, overwriteRe)).
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
then(done);
});
it('should not overwrite existing builds (even if the SHA is different)', done => {
// Since only the first few characters of the SHA are used, it is possible for two different
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
// overwrite the first.
const sha9Almost = sha9.replace(/.$/, '8');
expect(sha9Almost).not.toBe(sha9);
h.createDummyBuild(pr, sha9, isPublic);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9Almost}`).
then(h.verifyResponse(409, overwriteRe)).
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
then(done);
});
it('should delete the PR directory on error (for new PR)', done => {
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(500)).
then(() => expect(h.buildExists(pr, '', isPublic)).toBe(false)).
then(done);
});
it('should only delete the SHA directory on error (for existing PR)', done => {
h.createDummyBuild(pr, sha0, isPublic);
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(500)).
then(() => {
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
expect(h.buildExists(pr, '', isPublic)).toBe(true);
}).
then(done);
});
describe('on successful upload', () => {
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
const statusCode = isPublic ? 201 : 202;
let uploadPromise: Promise<CmdResult>;
beforeEach(() => {
h.createDummyArchive(pr, sha9, archivePath);
uploadPromise = h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`);
});
afterEach(() => h.deletePrDir(pr, isPublic));
it(`should respond with ${statusCode}`, done => {
uploadPromise.then(h.verifyResponse(statusCode)).then(done);
});
it('should extract the contents of the uploaded file', done => {
uploadPromise.
then(() => {
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
expect(h.readBuildFile(pr, sha9, 'foo/bar.js', isPublic)).toContain(`uploaded/${pr}`);
}).
then(done);
});
it(`should create files/directories owned by '${h.wwwUser}'`, done => {
const prDir = h.getPrDir(pr, isPublic);
const shaDir = h.getShaDir(prDir, sha9);
const idxPath = path.join(shaDir, 'index.html');
const barPath = path.join(shaDir, 'foo', 'bar.js');
uploadPromise.
then(() => Promise.all([
h.runCmd(`find ${shaDir}`),
h.runCmd(`find ${shaDir} -user ${h.wwwUser}`),
])).
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
expect(userFiles).toBe(allFiles);
expect(userFiles).toContain(shaDir);
expect(userFiles).toContain(idxPath);
expect(userFiles).toContain(barPath);
}).
then(done);
});
it('should delete the uploaded file', done => {
expect(fs.existsSync(archivePath)).toBe(true);
uploadPromise.
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
then(done);
});
it('should make the build directory non-writable', done => {
const prDir = h.getPrDir(pr, isPublic);
const shaDir = h.getShaDir(prDir, sha9);
const idxPath = path.join(shaDir, 'index.html');
const barPath = path.join(shaDir, 'foo', 'bar.js');
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
const isNotWritable = (fileOrDir: string) => {
const mode = fs.statSync(fileOrDir).mode;
// tslint:disable-next-line: no-bitwise
return !(mode & parseInt('222', 8));
};
uploadPromise.
then(() => {
expect(isNotWritable(shaDir)).toBe(true);
expect(isNotWritable(idxPath)).toBe(true);
expect(isNotWritable(barPath)).toBe(true);
}).
then(done);
});
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)', done => {
// It is possible that 40-chars long build directories exist, if they had been deployed
// before implementing the shorter build directory names. In that case, we don't want the
// second (shorter) name to be considered the same as the old one (even if they originate
// from the same SHA).
h.createDummyBuild(pr, sha9, isPublic, false, true);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toContain('index.html');
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic, true);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(statusCode)).
then(() => {
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
expect(h.buildExists(pr, sha9, isPublic, true)).toBe(true);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
}).
then(done);
});
});
describe('when the PR\'s visibility has changed', () => {
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
const statusCode = isPublic ? 201 : 202;
const checkPrVisibility = (isPublic2: boolean) => {
expect(h.buildExists(pr, '', isPublic2)).toBe(true);
expect(h.buildExists(pr, '', !isPublic2)).toBe(false);
expect(h.buildExists(pr, sha0, isPublic2)).toBe(true);
expect(h.buildExists(pr, sha0, !isPublic2)).toBe(false);
};
const uploadBuild = (sha: string) => h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha}`);
beforeEach(() => {
h.createDummyBuild(pr, sha0, !isPublic);
h.createDummyArchive(pr, sha9, archivePath);
checkPrVisibility(!isPublic);
});
afterEach(() => h.deletePrDir(pr, isPublic));
it('should update the PR\'s visibility', done => {
uploadBuild(sha9).
then(h.verifyResponse(statusCode)).
then(() => {
checkPrVisibility(isPublic);
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(sha9);
}).
then(done);
});
it('should not overwrite existing builds (but keep the updated visibility)', done => {
expect(h.buildExists(pr, sha0, isPublic)).toBe(false);
uploadBuild(sha0).
then(h.verifyResponse(409, overwriteRe)).
then(() => {
checkPrVisibility(isPublic);
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(pr);
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(`uploaded/${pr}`);
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(sha0);
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(sha9);
}).
then(done);
});
it('should reject the request if it fails to update the PR\'s visibility', done => {
// One way to cause an error is to have both a public and a hidden directory for the same PR.
h.createDummyBuild(pr, sha0, isPublic);
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(pr, !isPublic)}' ` +
`to existing directory '${h.getPrDir(pr, isPublic)}'.`);
uploadBuild(sha9).
then(h.verifyResponse(409, errorRegex)).
then(() => {
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
expect(h.buildExists(pr, sha9, !isPublic)).toBe(false);
}).
then(done);
});
});
}));
});
describe(`${host}/health-check`, () => {
it('should respond with 200', done => {
Promise.all([
h.runCmd(`curl -iL http://${host}/health-check`).then(h.verifyResponse(200)),
h.runCmd(`curl -iL http://${host}/health-check/`).then(h.verifyResponse(200)),
]).then(done);
});
it('should respond with 404 if the path does not match exactly', done => {
Promise.all([
h.runCmd(`curl -iL http://${host}/health-check/foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/health-check-foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/health-checknfoo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foo/health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foo-health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foonhealth-check`).then(h.verifyResponse(404)),
]).then(done);
});
});
describe(`${host}/pr-updated`, () => {
const url = `http://${host}/pr-updated`;
// Helpers
const curl = (payload?: {number: number, action?: string}) => {
const payloadStr = payload && JSON.stringify(payload) || '';
return `curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`;
};
it('should disallow non-POST requests', done => {
const bodyRegex = /^Unknown resource in request/;
Promise.all([
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
]).then(done);
});
it('should respond with 400 for requests without a payload', done => {
const bodyRegex = /^Missing or empty 'number' field in request/;
h.runCmd(curl()).
then(h.verifyResponse(400, bodyRegex)).
then(done);
});
it('should respond with 400 for requests without a \'number\' field', done => {
const bodyRegex = /^Missing or empty 'number' field in request/;
Promise.all([
h.runCmd(curl({} as any)).then(h.verifyResponse(400, bodyRegex)),
h.runCmd(curl({number: null} as any)).then(h.verifyResponse(400, bodyRegex)),
]).then(done);
});
it('should reject requests for which checking the PR visibility fails', done => {
h.runCmd(curl({number: c.BV_getPrIsTrusted_error})).
then(h.verifyResponse(500, /Test/)).
then(done);
});
it('should respond with 404 for unknown paths', done => {
const mockPayload = JSON.stringify({number: +pr});
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" http://${host}`;
Promise.all([
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
]).then(done);
});
it('should do nothing if PR\'s visibility is already up-to-date', done => {
const publicPr = pr;
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
const checkVisibilities = () => {
// Public build is already public.
expect(h.buildExists(publicPr, '', false)).toBe(false);
expect(h.buildExists(publicPr, '', true)).toBe(true);
// Hidden build is already hidden.
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
};
h.createDummyBuild(publicPr, sha9, true);
h.createDummyBuild(hiddenPr, sha9, false);
checkVisibilities();
Promise.
all([
h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
]).
// Visibilities should not have changed, because the specified action could not have triggered a change.
then(checkVisibilities).
then(done);
});
it('should do nothing if \'action\' implies no visibility change', done => {
const publicPr = pr;
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
const checkVisibilities = () => {
// Public build is hidden atm.
expect(h.buildExists(publicPr, '', false)).toBe(true);
expect(h.buildExists(publicPr, '', true)).toBe(false);
// Hidden build is public atm.
expect(h.buildExists(hiddenPr, '', false)).toBe(false);
expect(h.buildExists(hiddenPr, '', true)).toBe(true);
};
h.createDummyBuild(publicPr, sha9, false);
h.createDummyBuild(hiddenPr, sha9, true);
checkVisibilities();
Promise.
all([
h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
]).
// Visibilities should not have changed, because the specified action could not have triggered a change.
then(checkVisibilities).
then(done);
});
describe('when the visibility has changed', () => {
const publicPr = pr;
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
beforeEach(() => {
// Create initial PR builds with opposite visibilities as the ones that will be reported:
// - The now public PR was previously hidden.
// - The now hidden PR was previously public.
h.createDummyBuild(publicPr, sha9, false);
h.createDummyBuild(hiddenPr, sha9, true);
expect(h.buildExists(publicPr, '', false)).toBe(true);
expect(h.buildExists(publicPr, '', true)).toBe(false);
expect(h.buildExists(hiddenPr, '', false)).toBe(false);
expect(h.buildExists(hiddenPr, '', true)).toBe(true);
});
afterEach(() => {
// Expect PRs' visibility to have been updated:
// - The public PR should be actually public (previously it was hidden).
// - The hidden PR should be actually hidden (previously it was public).
expect(h.buildExists(publicPr, '', false)).toBe(false);
expect(h.buildExists(publicPr, '', true)).toBe(true);
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
h.deletePrDir(publicPr, true);
h.deletePrDir(hiddenPr, false);
});
it('should update the PR\'s visibility (action: undefined)', done => {
Promise.all([
h.runCmd(curl({number: +publicPr})).then(h.verifyResponse(200)),
h.runCmd(curl({number: +hiddenPr})).then(h.verifyResponse(200)),
]).then(done);
});
it('should update the PR\'s visibility (action: labeled)', done => {
Promise.all([
h.runCmd(curl({number: +publicPr, action: 'labeled'})).then(h.verifyResponse(200)),
h.runCmd(curl({number: +hiddenPr, action: 'labeled'})).then(h.verifyResponse(200)),
]).then(done);
});
it('should update the PR\'s visibility (action: unlabeled)', done => {
Promise.all([
h.runCmd(curl({number: +publicPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
h.runCmd(curl({number: +hiddenPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
]).then(done);
});
});
});
describe(`${host}/*`, () => {
it('should respond with 404 for requests to unknown URLs', done => {
const bodyRegex = /^Unknown resource/;
Promise.all([
h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(404, bodyRegex)),
]).then(done);
});
});
});
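
Throughout the tests above, h.runCmd() resolves with the output of the spawned curl command and h.verifyResponse() returns a handler that asserts on that output. As orientation only, here is a minimal hypothetical sketch of such a verifier; the helper that actually ships with the repository may differ:

// Hypothetical sketch of h.verifyResponse(), inferred purely from its usage in the tests
// above; the real test helper may differ.
const verifyResponse = (expectedStatus: number, expectedBody: RegExp | string = '') =>
  (curlOutput: string) => {
    // 'curl -i' includes the status line and response headers in its output.
    expect(curlOutput).toContain(` ${expectedStatus} `);
    expect(curlOutput).toMatch(expectedBody);
  };

With this shape, a call such as h.runCmd(...).then(h.verifyResponse(404, bodyRegex)) simply pipes the curl output through the returned assertion function.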

View File

@@ -21,18 +21,22 @@
   },
   "dependencies": {
     "body-parser": "^1.18.2",
+    "delete-empty": "^2.0.0",
     "express": "^4.15.4",
     "jasmine": "^2.8.0",
-    "jsonwebtoken": "^8.0.1",
-    "shelljs": "^0.7.8",
+    "nock": "^9.2.5",
+    "node-fetch": "^2.1.2",
+    "shelljs": "^0.8.1",
+    "tar-stream": "^1.6.0",
     "tslib": "^1.7.1"
   },
   "devDependencies": {
     "@types/body-parser": "^1.16.5",
     "@types/express": "^4.0.37",
     "@types/jasmine": "^2.6.0",
-    "@types/jsonwebtoken": "^7.2.3",
+    "@types/nock": "^9.1.3",
     "@types/node": "^8.0.30",
+    "@types/node-fetch": "^1.6.8",
     "@types/shelljs": "^0.8.0",
     "@types/supertest": "^2.0.3",
     "concurrently": "^3.5.0",

View File

@ -1,135 +1,176 @@
// Imports // Imports
import * as fs from 'fs'; import * as fs from 'fs';
import * as path from 'path'; import {normalize} from 'path';
import * as shell from 'shelljs'; import * as shell from 'shelljs';
import {BuildCleaner} from '../../lib/clean-up/build-cleaner'; import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
import {HIDDEN_DIR_PREFIX} from '../../lib/common/constants'; import {HIDDEN_DIR_PREFIX} from '../../lib/common/constants';
import {GithubPullRequests} from '../../lib/common/github-pull-requests'; import {GithubPullRequests} from '../../lib/common/github-pull-requests';
const EXISTING_BUILDS = [10, 20, 30, 40];
const EXISTING_DOWNLOADS = [
'downloads/10-ABCDEF0-build.zip',
'downloads/10-1234567-build.zip',
'downloads/20-ABCDEF0-build.zip',
'downloads/20-1234567-build.zip',
];
const OPEN_PRS = [10, 40];
const ANY_DATE = jasmine.any(String);
// Tests // Tests
describe('BuildCleaner', () => { describe('BuildCleaner', () => {
let cleaner: BuildCleaner; let cleaner: BuildCleaner;
beforeEach(() => cleaner = new BuildCleaner('/foo/bar', 'baz/qux', '12345')); beforeEach(() => {
spyOn(console, 'error');
spyOn(console, 'log');
cleaner = new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', 'downloads', 'build.zip');
});
describe('constructor()', () => { describe('constructor()', () => {
it('should throw if \'buildsDir\' is empty', () => { it('should throw if \'buildsDir\' is empty', () => {
expect(() => new BuildCleaner('', '/baz/qux', '12345')). expect(() => new BuildCleaner('', 'baz', 'qux', '12345', 'downloads', 'build.zip')).
toThrowError('Missing or empty required parameter \'buildsDir\'!'); toThrowError('Missing or empty required parameter \'buildsDir\'!');
}); });
it('should throw if \'repoSlug\' is empty', () => { it('should throw if \'githubOrg\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', '', '12345')). expect(() => new BuildCleaner('/foo/bar', '', 'qux', '12345', 'downloads', 'build.zip')).
toThrowError('Missing or empty required parameter \'repoSlug\'!'); toThrowError('Missing or empty required parameter \'githubOrg\'!');
});
it('should throw if \'githubRepo\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', 'baz', '', '12345', 'downloads', 'build.zip')).
toThrowError('Missing or empty required parameter \'githubRepo\'!');
}); });
it('should throw if \'githubToken\' is empty', () => { it('should throw if \'githubToken\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', 'baz/qux', '')). expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '', 'downloads', 'build.zip')).
toThrowError('Missing or empty required parameter \'githubToken\'!'); toThrowError('Missing or empty required parameter \'githubToken\'!');
}); });
it('should throw if \'downloadsDir\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', '', 'build.zip')).
toThrowError('Missing or empty required parameter \'downloadsDir\'!');
});
it('should throw if \'artifactPath\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', 'downloads', '')).
toThrowError('Missing or empty required parameter \'artifactPath\'!');
});
}); });
describe('cleanUp()', () => { describe('cleanUp()', () => {
let cleanerGetExistingBuildNumbersSpy: jasmine.Spy; let cleanerGetExistingBuildNumbersSpy: jasmine.Spy;
let cleanerGetOpenPrNumbersSpy: jasmine.Spy; let cleanerGetOpenPrNumbersSpy: jasmine.Spy;
let cleanerGetExistingDownloadsSpy: jasmine.Spy;
let cleanerRemoveUnnecessaryBuildsSpy: jasmine.Spy; let cleanerRemoveUnnecessaryBuildsSpy: jasmine.Spy;
let existingBuildsDeferred: {resolve: (v?: any) => void, reject: (e?: any) => void}; let cleanerRemoveUnnecessaryDownloadsSpy: jasmine.Spy;
let openPrsDeferred: {resolve: (v?: any) => void, reject: (e?: any) => void};
let promise: Promise<void>;
beforeEach(() => { beforeEach(() => {
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner as any, 'getExistingBuildNumbers').and.callFake(() => { cleanerGetExistingBuildNumbersSpy = spyOn(cleaner, 'getExistingBuildNumbers')
return new Promise((resolve, reject) => existingBuildsDeferred = {resolve, reject}); .and.callFake(() => Promise.resolve(EXISTING_BUILDS));
}); cleanerGetOpenPrNumbersSpy = spyOn(cleaner, 'getOpenPrNumbers')
cleanerGetOpenPrNumbersSpy = spyOn(cleaner as any, 'getOpenPrNumbers').and.callFake(() => { .and.callFake(() => Promise.resolve(OPEN_PRS));
return new Promise((resolve, reject) => openPrsDeferred = {resolve, reject}); cleanerGetExistingDownloadsSpy = spyOn(cleaner, 'getExistingDownloads')
}); .and.callFake(() => Promise.resolve(EXISTING_DOWNLOADS));
cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner as any, 'removeUnnecessaryBuilds');
cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner, 'removeUnnecessaryBuilds');
cleanerRemoveUnnecessaryDownloadsSpy = spyOn(cleaner, 'removeUnnecessaryDownloads');
promise = cleaner.cleanUp();
}); });
it('should return a promise', () => { it('should return a promise', () => {
const promise = cleaner.cleanUp();
expect(promise).toEqual(jasmine.any(Promise)); expect(promise).toEqual(jasmine.any(Promise));
}); });
it('should get the existing builds', () => { it('should get the open PRs', async () => {
expect(cleanerGetExistingBuildNumbersSpy).toHaveBeenCalled(); await cleaner.cleanUp();
});
it('should get the open PRs', () => {
expect(cleanerGetOpenPrNumbersSpy).toHaveBeenCalled(); expect(cleanerGetOpenPrNumbersSpy).toHaveBeenCalled();
}); });
it('should reject if \'getExistingBuildNumbers()\' rejects', done => { it('should get the existing builds', async () => {
promise.catch(err => { await cleaner.cleanUp();
expect(cleanerGetExistingBuildNumbersSpy).toHaveBeenCalled();
});
it('should get the existing downloads', async () => {
await cleaner.cleanUp();
expect(cleanerGetExistingDownloadsSpy).toHaveBeenCalled();
});
it('should pass existing builds and open PRs to \'removeUnnecessaryBuilds()\'', async () => {
await cleaner.cleanUp();
expect(cleanerRemoveUnnecessaryBuildsSpy).toHaveBeenCalledWith(EXISTING_BUILDS, OPEN_PRS);
});
it('should pass existing downloads and open PRs to \'removeUnnecessaryDownloads()\'', async () => {
await cleaner.cleanUp();
expect(cleanerRemoveUnnecessaryDownloadsSpy).toHaveBeenCalledWith(EXISTING_DOWNLOADS, OPEN_PRS);
});
it('should reject if \'getOpenPrNumbers()\' rejects', async () => {
try {
cleanerGetOpenPrNumbersSpy.and.callFake(() => Promise.reject('Test'));
await cleaner.cleanUp();
} catch (err) {
expect(err).toBe('Test'); expect(err).toBe('Test');
done(); }
});
existingBuildsDeferred.reject('Test');
}); });
it('should reject if \'getOpenPrNumbers()\' rejects', done => { it('should reject if \'getExistingBuildNumbers()\' rejects', async () => {
promise.catch(err => { try {
cleanerGetExistingBuildNumbersSpy.and.callFake(() => Promise.reject('Test'));
await cleaner.cleanUp();
} catch (err) {
expect(err).toBe('Test'); expect(err).toBe('Test');
done(); }
});
openPrsDeferred.reject('Test');
}); });
it('should reject if \'removeUnnecessaryBuilds()\' rejects', done => { it('should reject if \'getExistingDownloads()\' rejects', async () => {
promise.catch(err => { try {
cleanerGetExistingDownloadsSpy.and.callFake(() => Promise.reject('Test'));
await cleaner.cleanUp();
} catch (err) {
expect(err).toBe('Test'); expect(err).toBe('Test');
done(); }
});
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.reject('Test'));
existingBuildsDeferred.resolve();
openPrsDeferred.resolve();
}); });
it('should pass existing builds and open PRs to \'removeUnnecessaryBuilds()\'', done => { it('should reject if \'removeUnnecessaryBuilds()\' rejects', async () => {
promise.then(() => { try {
expect(cleanerRemoveUnnecessaryBuildsSpy).toHaveBeenCalledWith('foo', 'bar'); cleanerRemoveUnnecessaryBuildsSpy.and.callFake(() => Promise.reject('Test'));
done(); await cleaner.cleanUp();
}); } catch (err) {
expect(err).toBe('Test');
existingBuildsDeferred.resolve('foo'); }
openPrsDeferred.resolve('bar');
}); });
it('should reject if \'removeUnnecessaryDownloads()\' rejects', async () => {
it('should resolve with the value returned by \'removeUnnecessaryBuilds()\'', done => { try {
promise.then(result => { cleanerRemoveUnnecessaryDownloadsSpy.and.callFake(() => Promise.reject('Test'));
expect(result as any).toBe('Test'); await cleaner.cleanUp();
done(); } catch (err) {
}); expect(err).toBe('Test');
}
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.resolve('Test'));
existingBuildsDeferred.resolve();
openPrsDeferred.resolve();
}); });
}); });
// Protected methods
describe('getExistingBuildNumbers()', () => { describe('getExistingBuildNumbers()', () => {
let fsReaddirSpy: jasmine.Spy; let fsReaddirSpy: jasmine.Spy;
let readdirCb: (err: any, files?: string[]) => void; let readdirCb: (err: any, files?: string[]) => void;
@ -137,7 +178,7 @@ describe('BuildCleaner', () => {
beforeEach(() => { beforeEach(() => {
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb); fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
promise = (cleaner as any).getExistingBuildNumbers(); promise = cleaner.getExistingBuildNumbers();
}); });
@ -203,7 +244,7 @@ describe('BuildCleaner', () => {
return new Promise((resolve, reject) => prDeferred = {resolve, reject}); return new Promise((resolve, reject) => prDeferred = {resolve, reject});
}); });
promise = (cleaner as any).getOpenPrNumbers(); promise = cleaner.getOpenPrNumbers();
}); });
@ -236,6 +277,65 @@ describe('BuildCleaner', () => {
prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]); prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]);
}); });
it('should log the number of open PRs', () => {
promise.then(prNumbers => {
expect(console.log).toHaveBeenCalledWith(ANY_DATE, 'BuildCleaner: ', `Open pull requests: ${prNumbers}`);
});
});
});
describe('getExistingDownloads()', () => {
let fsReaddirSpy: jasmine.Spy;
let readdirCb: (err: any, files?: string[]) => void;
let promise: Promise<string[]>;
beforeEach(() => {
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
promise = cleaner.getExistingDownloads();
});
it('should return a promise', () => {
expect(promise).toEqual(jasmine.any(Promise));
});
it('should get the contents of the builds directory', () => {
expect(fsReaddirSpy).toHaveBeenCalled();
expect(fsReaddirSpy.calls.argsFor(0)[0]).toBe('downloads');
});
it('should reject if an error occurs while getting the files', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
readdirCb('Test');
});
it('should resolve with the returned file paths', done => {
promise.then(result => {
expect(result).toEqual(EXISTING_DOWNLOADS);
done();
});
readdirCb(null, EXISTING_DOWNLOADS);
});
it('should ignore files that do not match the artifactPath', done => {
promise.then(result => {
expect(result).toEqual(['10-ABCDEF-build.zip', '30-FFFFFFF-build.zip']);
done();
});
readdirCb(null, ['10-ABCDEF-build.zip', '20-AAAAAAA-otherfile.zip', '30-FFFFFFF-build.zip']);
});
}); });
@ -253,7 +353,7 @@ describe('BuildCleaner', () => {
it('should test if the directory exists (and return if is does not)', () => { it('should test if the directory exists (and return if is does not)', () => {
shellTestSpy.and.returnValue(false); shellTestSpy.and.returnValue(false);
(cleaner as any).removeDir('/foo/bar'); cleaner.removeDir('/foo/bar');
expect(shellTestSpy).toHaveBeenCalledWith('-d', '/foo/bar'); expect(shellTestSpy).toHaveBeenCalledWith('-d', '/foo/bar');
expect(shellChmodSpy).not.toHaveBeenCalled(); expect(shellChmodSpy).not.toHaveBeenCalled();
@ -262,99 +362,117 @@ describe('BuildCleaner', () => {
it('should remove the specified directory and its content', () => { it('should remove the specified directory and its content', () => {
(cleaner as any).removeDir('/foo/bar'); cleaner.removeDir('/foo/bar');
expect(shellRmSpy).toHaveBeenCalledWith('-rf', '/foo/bar'); expect(shellRmSpy).toHaveBeenCalledWith('-rf', '/foo/bar');
}); });
it('should make the directory and its content writable before removing', () => { it('should make the directory and its content writable before removing', () => {
shellRmSpy.and.callFake(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar')); shellRmSpy.and.callFake(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar'));
(cleaner as any).removeDir('/foo/bar'); cleaner.removeDir('/foo/bar');
expect(shellRmSpy).toHaveBeenCalled(); expect(shellRmSpy).toHaveBeenCalled();
}); });
it('should catch errors and log them', () => { it('should catch errors and log them', () => {
const consoleErrorSpy = spyOn(console, 'error');
shellRmSpy.and.callFake(() => { shellRmSpy.and.callFake(() => {
// tslint:disable-next-line: no-string-throw // tslint:disable-next-line: no-string-throw
throw 'Test'; throw 'Test';
}); });
(cleaner as any).removeDir('/foo/bar'); cleaner.removeDir('/foo/bar');
expect(consoleErrorSpy).toHaveBeenCalled(); expect(console.error).toHaveBeenCalledWith(
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain('Unable to remove \'/foo/bar\''); jasmine.any(String), 'BuildCleaner: ', 'ERROR: Unable to remove \'/foo/bar\' due to:', 'Test');
expect(consoleErrorSpy.calls.argsFor(0)[1]).toBe('Test');
}); });
}); });
describe('removeUnnecessaryBuilds()', () => { describe('removeUnnecessaryBuilds()', () => {
let consoleLogSpy: jasmine.Spy;
let cleanerRemoveDirSpy: jasmine.Spy; let cleanerRemoveDirSpy: jasmine.Spy;
beforeEach(() => { beforeEach(() => {
consoleLogSpy = spyOn(console, 'log'); cleanerRemoveDirSpy = spyOn(cleaner, 'removeDir');
cleanerRemoveDirSpy = spyOn(cleaner as any, 'removeDir');
}); });
it('should log the number of existing builds, open PRs and builds to be removed', () => { it('should log the number of existing builds and builds to be removed', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], [3, 4, 5, 6]); cleaner.removeUnnecessaryBuilds([1, 2, 3], [3, 4, 5, 6]);
expect(console.log).toHaveBeenCalledWith('Existing builds: 3'); expect(console.log).toHaveBeenCalledWith(ANY_DATE, 'BuildCleaner: ', 'Existing builds: 3');
expect(console.log).toHaveBeenCalledWith('Open pull requests: 4'); expect(console.log).toHaveBeenCalledWith(ANY_DATE, 'BuildCleaner: ', 'Removing 2 build(s): 1, 2');
expect(console.log).toHaveBeenCalledWith('Removing 2 build(s): 1, 2');
}); });
it('should construct full paths to directories (by prepending \'buildsDir\')', () => { it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []); cleaner.removeUnnecessaryBuilds([1, 2, 3], []);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/2')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/2'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
}); });
it('should try removing hidden directories as well', () => { it('should try removing hidden directories as well', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []); cleaner.removeUnnecessaryBuilds([1, 2, 3], []);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
}); });
it('should remove the builds that do not correspond to open PRs', () => { it('should remove the builds that do not correspond to open PRs', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]); cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4); expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
cleanerRemoveDirSpy.calls.reset(); cleanerRemoveDirSpy.calls.reset();
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]); cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0); expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0);
cleanerRemoveDirSpy.calls.reset(); cleanerRemoveDirSpy.calls.reset();
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []); (cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(8); expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(8);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/2')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/2'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/4')); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/4'));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}4`)); expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}4`));
cleanerRemoveDirSpy.calls.reset(); cleanerRemoveDirSpy.calls.reset();
}); });
}); });
describe('removeUnnecessaryDownloads()', () => {
beforeEach(() => {
spyOn(shell, 'rm');
});
it('should remove the downloads that do not correspond to open PRs', () => {
cleaner.removeUnnecessaryDownloads(EXISTING_DOWNLOADS, OPEN_PRS);
expect(shell.rm).toHaveBeenCalledTimes(2);
expect(shell.rm).toHaveBeenCalledWith('downloads/20-ABCDEF0-build.zip');
expect(shell.rm).toHaveBeenCalledWith('downloads/20-1234567-build.zip');
});
it('should log the number of existing downloads and downloads to be removed', () => {
cleaner.removeUnnecessaryDownloads(EXISTING_DOWNLOADS, OPEN_PRS);
expect(console.log).toHaveBeenCalledWith(ANY_DATE, 'BuildCleaner: ', 'Existing downloads: 4');
expect(console.log).toHaveBeenCalledWith(ANY_DATE, 'BuildCleaner: ',
'Removing 2 download(s): downloads/20-ABCDEF0-build.zip, downloads/20-1234567-build.zip');
});
});
}); });
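
The removeUnnecessaryDownloads() tests above pin down the observable behaviour: count the existing downloads, work out which ones belong to PRs that are no longer open, log both figures and delete the leftovers with shell.rm(). A hedged sketch of a method with that behaviour follows; the parser name getPrInfoFromDownloadPath and the import path are assumptions taken from the utils spec further down, and the real implementation logs through a prefixed logger rather than plain console.log:

import * as shell from 'shelljs';
import {getPrInfoFromDownloadPath} from '../common/utils';  // assumed location of the parser

function removeUnnecessaryDownloads(existingDownloads: string[], openPrs: number[]): void {
  // A download is obsolete when the PR encoded in its file name is no longer open.
  const toRemove = existingDownloads.filter(filePath =>
    openPrs.indexOf(getPrInfoFromDownloadPath(filePath).pr) === -1);

  console.log(`Existing downloads: ${existingDownloads.length}`);
  console.log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);

  toRemove.forEach(filePath => shell.rm(filePath));
}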

View File

@ -0,0 +1,134 @@
import * as nock from 'nock';
import {CircleCiApi} from '../../lib/common/circle-ci-api';
const ORG = 'testorg';
const REPO = 'testrepo';
const TOKEN = 'xxxx';
const BASE_URL = `https://circleci.com/api/v1.1/project/github/${ORG}/${REPO}`;
describe('CircleCIApi', () => {
describe('constructor()', () => {
it('should throw if \'githubOrg\' is missing or empty', () => {
expect(() => new CircleCiApi('', REPO, TOKEN)).
toThrowError('Missing or empty required parameter \'githubOrg\'!');
});
it('should throw if \'githubRepo\' is missing or empty', () => {
expect(() => new CircleCiApi(ORG, '', TOKEN)).
toThrowError('Missing or empty required parameter \'githubRepo\'!');
});
it('should throw if \'circleCiToken\' is missing or empty', () => {
expect(() => new CircleCiApi(ORG, REPO, '')).
toThrowError('Missing or empty required parameter \'circleCiToken\'!');
});
});
describe('getBuildInfo', () => {
it('should make a request to the CircleCI API for the given build number', async () => {
const api = new CircleCiApi(ORG, REPO, TOKEN);
const buildNum = 12345;
const expectedBuildInfo: any = { org: ORG, repo: REPO, build_num: buildNum };
const request = nock(BASE_URL)
.get(`/${buildNum}?circle-token=${TOKEN}`)
.reply(200, expectedBuildInfo);
const buildInfo = await api.getBuildInfo(buildNum);
expect(buildInfo).toEqual(expectedBuildInfo);
request.done();
});
it('should throw an error if the request fails', async () => {
const api = new CircleCiApi(ORG, REPO, TOKEN);
const buildNum = 12345;
const errorMessage = 'Invalid request';
const request = nock(BASE_URL).get(`/${buildNum}?circle-token=${TOKEN}`);
try {
request.replyWithError(errorMessage);
await api.getBuildInfo(buildNum);
throw new Error('Exception Expected');
} catch (err) {
expect(err.message).toEqual(
`CircleCI build info request failed ` +
`(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
}
try {
request.reply(404, errorMessage);
await api.getBuildInfo(buildNum);
throw new Error('Exception Expected');
} catch (err) {
expect(err.message).toEqual(
`CircleCI build info request failed ` +
`(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
}
});
});
describe('getBuildArtifactUrl', () => {
it('should make a request to the CircleCI API for the given build number', async () => {
const api = new CircleCiApi(ORG, REPO, TOKEN);
const buildNum = 12345;
const artifact0: any = { path: 'some/path/0', url: 'https://url/0' };
const artifact1: any = { path: 'some/path/1', url: 'https://url/1' };
const artifact2: any = { path: 'some/path/2', url: 'https://url/2' };
const request = nock(BASE_URL)
.get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
.reply(200, [artifact0, artifact1, artifact2]);
const artifactUrl = await api.getBuildArtifactUrl(buildNum, 'some/path/1');
expect(artifactUrl).toEqual('https://url/1');
request.done();
});
it('should throw an error if the request fails', async () => {
const api = new CircleCiApi(ORG, REPO, TOKEN);
const buildNum = 12345;
const errorMessage = 'Invalid request';
const request = nock(BASE_URL).get(`/${buildNum}/artifacts?circle-token=${TOKEN}`);
try {
request.replyWithError(errorMessage);
await api.getBuildArtifactUrl(buildNum, 'some/path/1');
throw new Error('Exception Expected');
} catch (err) {
expect(err.message).toEqual(
`CircleCI artifact URL request failed ` +
`(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
}
try {
request.reply(404, errorMessage);
await api.getBuildArtifactUrl(buildNum, 'some/path/1');
throw new Error('Exception Expected');
} catch (err) {
expect(err.message).toEqual(
`CircleCI artifact URL request failed ` +
`(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
}
});
it('should throw an error if the response does not contain the specified artifact', async () => {
const api = new CircleCiApi(ORG, REPO, TOKEN);
const buildNum = 12345;
const artifact0: any = { path: 'some/path/0', url: 'https://url/0' };
const artifact1: any = { path: 'some/path/1', url: 'https://url/1' };
const artifact2: any = { path: 'some/path/2', url: 'https://url/2' };
nock(BASE_URL)
.get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
.reply(200, [artifact0, artifact1, artifact2]);
try {
await api.getBuildArtifactUrl(buildNum, 'some/path/3');
throw new Error('Exception Expected');
} catch (err) {
expect(err.message).toEqual(
`CircleCI artifact URL request failed ` +
`(Missing artifact (some/path/3) for CircleCI build: ${buildNum})`);
}
});
});
});
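
The expectations above fully describe getBuildArtifactUrl(): fetch the artifact list for the build, pick the entry whose path matches, resolve with its url, and wrap every failure in a single descriptive error message. A hedged sketch under those assumptions (node-fetch is used here because it appears in the updated package.json; the real method may be structured differently):

import fetch from 'node-fetch';

async function getBuildArtifactUrl(
    baseUrl: string, token: string, buildNum: number, artifactPath: string): Promise<string> {
  const url = `${baseUrl}/${buildNum}/artifacts?circle-token=${token}`;
  try {
    const response = await fetch(url);
    if (response.status !== 200) {
      throw new Error(`request to ${url} failed, reason: ${await response.text()}`);
    }
    const artifacts: {path: string, url: string}[] = await response.json();
    const artifact = artifacts.find(a => a.path === artifactPath);
    if (!artifact) {
      throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNum}`);
    }
    return artifact.url;
  } catch (error) {
    // The tests expect every failure mode to surface as one wrapped, descriptive error.
    throw new Error(`CircleCI artifact URL request failed (${error.message})`);
  }
}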

View File

@@ -1,7 +1,5 @@
 // Imports
-import {EventEmitter} from 'events';
-import {ClientRequest, IncomingMessage} from 'http';
-import * as https from 'https';
+import * as nock from 'nock';
 import {GithubApi} from '../../lib/common/github-api';
 // Tests
@@ -110,39 +108,6 @@ describe('GithubApi', () => {
   });
-  // Protected methods
-  describe('buildPath()', () => {
-    it('should return the pathname if no params', () => {
-      expect((api as any).buildPath('/foo')).toBe('/foo');
-      expect((api as any).buildPath('/foo', undefined)).toBe('/foo');
-      expect((api as any).buildPath('/foo', null)).toBe('/foo');
-    });
-    it('should append the params to the pathname', () => {
-      expect((api as any).buildPath('/foo', {bar: 'baz'})).toBe('/foo?bar=baz');
-    });
-    it('should join the params with \'&\'', () => {
-      expect((api as any).buildPath('/foo', {bar: 1, baz: 2})).toBe('/foo?bar=1&baz=2');
-    });
-    it('should ignore undefined/null params', () => {
-      expect((api as any).buildPath('/foo', {bar: undefined, baz: null})).toBe('/foo');
-    });
-    it('should encode param values as URI components', () => {
-      expect((api as any).buildPath('/foo', {bar: 'b a&z'})).toBe('/foo?bar=b%20a%26z');
-    });
-  });
describe('getPaginated()', () => { describe('getPaginated()', () => {
let deferreds: {resolve: (v: any) => void, reject: (v: any) => void}[]; let deferreds: {resolve: (v: any) => void, reject: (v: any) => void}[];
@ -218,191 +183,162 @@ describe('GithubApi', () => {
}); });
describe('request()', () => { // Protected methods
let httpsRequestSpy: jasmine.Spy;
let latestRequest: ClientRequest;
beforeEach(() => { describe('buildPath()', () => {
const originalRequest = https.request;
httpsRequestSpy = spyOn(https, 'request').and.callFake((...args: any[]) => { it('should return the pathname if no params', () => {
latestRequest = originalRequest.apply(https, args); expect((api as any).buildPath('/foo')).toBe('/foo');
expect((api as any).buildPath('/foo', undefined)).toBe('/foo');
spyOn(latestRequest, 'on').and.callThrough(); expect((api as any).buildPath('/foo', null)).toBe('/foo');
spyOn(latestRequest, 'end');
return latestRequest;
});
}); });
it('should append the params to the pathname', () => {
expect((api as any).buildPath('/foo', {bar: 'baz'})).toBe('/foo?bar=baz');
});
it('should join the params with \'&\'', () => {
expect((api as any).buildPath('/foo', {bar: 1, baz: 2})).toBe('/foo?bar=1&baz=2');
});
it('should ignore undefined/null params', () => {
expect((api as any).buildPath('/foo', {bar: undefined, baz: null})).toBe('/foo');
});
it('should encode param values as URI components', () => {
expect((api as any).buildPath('/foo', {bar: 'b a&z'})).toBe('/foo?bar=b%20a%26z');
});
});
describe('request()', () => {
it('should return a promise', () => { it('should return a promise', () => {
nock('https://api.github.com').get('').reply(200);
expect((api as any).request()).toEqual(jasmine.any(Promise)); expect((api as any).request()).toEqual(jasmine.any(Promise));
}); });
it('should call \'https.request()\' with the correct options', () => { it('should call \'https.request()\' with the correct options', () => {
(api as any).request('method', 'path'); const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(200);
expect(httpsRequestSpy).toHaveBeenCalled(); (api as any).request('method', '/path');
expect(httpsRequestSpy.calls.argsFor(0)[0]).toEqual(jasmine.objectContaining({ requestHandler.done();
headers: jasmine.objectContaining({
'User-Agent': `Node/${process.versions.node}`,
}),
host: 'api.github.com',
method: 'method',
path: 'path',
}));
}); });
it('should call specify an \'Authorization\' header if \'githubToken\' is present', () => { it('should add the \'Authorization\' header containing the \'githubToken\'', () => {
(api as any).request('method', 'path'); const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method', undefined, {
expect(httpsRequestSpy).toHaveBeenCalled(); reqheaders: {Authorization: 'token 12345'},
expect(httpsRequestSpy.calls.argsFor(0)[0].headers).toEqual(jasmine.objectContaining({ })
Authorization: 'token 12345', .reply(200);
})); (api as any).request('method', '/path');
requestHandler.done();
}); });
it('should reject on request error', done => { it('should reject on request error', async () => {
(api as any).request('method', 'path').catch((err: any) => { nock('https://api.github.com')
expect(err).toBe('Test'); .intercept('/path', 'method')
done(); .replyWithError('Test');
}); let message = 'Failed to reject error';
await (api as any).request('method', '/path').catch((err: any) => message = err.message);
latestRequest.emit('error', 'Test'); expect(message).toEqual('Test');
});
it('should send the request (i.e. call \'end()\')', () => {
(api as any).request('method', 'path');
expect(latestRequest.end).toHaveBeenCalled();
}); });
it('should \'JSON.stringify\' and send the data along with the request', () => { it('should \'JSON.stringify\' and send the data along with the request', () => {
(api as any).request('method', 'path'); const data = {key: 'value'};
expect(latestRequest.end).toHaveBeenCalledWith(null); const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method', JSON.stringify(data))
(api as any).request('method', 'path', {key: 'value'}); .reply(200);
expect(latestRequest.end).toHaveBeenCalledWith('{"key":"value"}'); (api as any).request('method', '/path', data);
requestHandler.done();
}); });
describe('onResponse', () => { it('should reject if response statusCode is <200', done => {
let promise: Promise<object>; const requestHandler = nock('https://api.github.com')
let respond: (statusCode: number) => IncomingMessage; .intercept('/path', 'method')
.reply(199);
beforeEach(() => { (api as any).request('method', '/path')
promise = (api as any).request('method', 'path'); .catch((err: string) => {
respond = (statusCode: number) => {
const mockResponse = new EventEmitter() as IncomingMessage;
mockResponse.statusCode = statusCode;
const onResponse = httpsRequestSpy.calls.argsFor(0)[1];
onResponse(mockResponse);
return mockResponse;
};
});
it('should reject on response error', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
const res = respond(200);
res.emit('error', 'Test');
});
it('should reject if returned statusCode is <200', done => {
promise.catch(err => {
expect(err).toContain('failed'); expect(err).toContain('failed');
expect(err).toContain('status: 199'); expect(err).toContain('status: 199');
done(); done();
}); });
requestHandler.done();
const res = respond(199); });
res.emit('end');
});
it('should reject if returned statusCode is >=400', done => { it('should reject if response statusCode is >=400', done => {
promise.catch(err => { const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(400);
(api as any).request('method', '/path')
.catch((err: string) => {
expect(err).toContain('failed'); expect(err).toContain('failed');
expect(err).toContain('status: 400'); expect(err).toContain('status: 400');
done(); done();
}); });
requestHandler.done();
const res = respond(400); });
res.emit('end');
});
it('should include the response text in the rejection message', done => { it('should include the response text in the rejection message', done => {
promise.catch(err => { const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(500, 'Test');
(api as any).request('method', '/path')
.catch((err: string) => {
expect(err).toContain('Test'); expect(err).toContain('Test');
done(); done();
}); });
requestHandler.done();
});
const res = respond(500);
res.emit('data', 'Test'); it('should resolve if returned statusCode is >=200 and <400', done => {
res.emit('end'); const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(200);
(api as any).request('method', '/path').then(done);
requestHandler.done();
});
it('should parse the response body into an object using \'JSON.parse\'', done => {
const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(300, '{"foo": "bar"}');
(api as any).request('method', '/path').then((data: any) => {
expect(data).toEqual({foo: 'bar'});
done();
}); });
requestHandler.done();
});
it('should reject if the response text is malformed JSON', done => {
const requestHandler = nock('https://api.github.com')
.intercept('/path', 'method')
.reply(300, '}');
it('should resolve if returned statusCode is <=200 <400', done => { (api as any).request('method', '/path').catch((err: any) => {
promise.then(done); expect(err).toEqual(jasmine.any(SyntaxError));
done();
const res = respond(200);
res.emit('data', '{}');
res.emit('end');
}); });
requestHandler.done();
it('should resolve with the response text \'JSON.parsed\'', done => {
promise.then(data => {
expect(data).toEqual({foo: 'bar'});
done();
});
const res = respond(300);
res.emit('data', '{"foo":"bar"}');
res.emit('end');
});
it('should collect and concatenate the whole response text', done => {
promise.then(data => {
expect(data).toEqual({foo: 'bar', baz: 'qux'});
done();
});
const res = respond(300);
res.emit('data', '{"foo":');
res.emit('data', '"bar","baz"');
res.emit('data', ':"qux"}');
res.emit('end');
});
it('should reject if the response text is malformed JSON', done => {
promise.catch(err => {
expect(err).toEqual(jasmine.any(SyntaxError));
done();
});
const res = respond(300);
res.emit('data', '}');
res.emit('end');
});
}); });
}); });
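
The rewritten GithubApi tests above replace the old hand-rolled https.request spies with nock, which intercepts HTTP traffic at the network layer. The core pattern, shown here with a hypothetical path and payload, is: declare the expected request up front, exercise the code under test, then let the scope verify that the request was actually made.

import * as nock from 'nock';

// Fake the GitHub origin for this test: declare the request we expect to see
// and the canned response it should get (path and payload are illustrative).
const scope = nock('https://api.github.com')
  .get('/repos/foo/bar/issues/42')
  .reply(200, {number: 42});

// ...exercise the code under test here, then:
scope.done();  // throws if the declared request was never made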

View File

@ -1,20 +1,27 @@
// Imports // Imports
import {GithubApi} from '../../lib/common/github-api';
import {GithubPullRequests} from '../../lib/common/github-pull-requests'; import {GithubPullRequests} from '../../lib/common/github-pull-requests';
// Tests // Tests
describe('GithubPullRequests', () => { describe('GithubPullRequests', () => {
let githubApi: jasmine.SpyObj<GithubApi>;
beforeEach(() => {
githubApi = jasmine.createSpyObj('githubApi', ['post', 'get', 'getPaginated']);
});
describe('constructor()', () => { describe('constructor()', () => {
it('should throw if \'githubToken\' is missing or empty', () => { it('should throw if \'githubOrg\' is missing or empty', () => {
expect(() => new GithubPullRequests('', 'foo/bar')). expect(() => new GithubPullRequests(githubApi, '', 'bar')).
toThrowError('Missing or empty required parameter \'githubToken\'!'); toThrowError('Missing or empty required parameter \'githubOrg\'!');
}); });
it('should throw if \'repoSlug\' is missing or empty', () => { it('should throw if \'githubRepo\' is missing or empty', () => {
expect(() => new GithubPullRequests('12345', '')). expect(() => new GithubPullRequests(githubApi, 'foo', '')).
toThrowError('Missing or empty required parameter \'repoSlug\'!'); toThrowError('Missing or empty required parameter \'githubRepo\'!');
}); });
}); });
@ -22,17 +29,9 @@ describe('GithubPullRequests', () => {
describe('addComment()', () => { describe('addComment()', () => {
let prs: GithubPullRequests; let prs: GithubPullRequests;
let deferred: {resolve: (v: any) => void, reject: (v: any) => void};
beforeEach(() => { beforeEach(() => {
prs = new GithubPullRequests('12345', 'foo/bar'); prs = new GithubPullRequests(githubApi, 'foo', 'bar');
spyOn(prs, 'post').and.callFake(() => new Promise((resolve, reject) => deferred = {resolve, reject}));
});
it('should return a promise', () => {
expect(prs.addComment(42, 'body')).toEqual(jasmine.any(Promise));
}); });
@ -47,30 +46,28 @@ describe('GithubPullRequests', () => {
}); });
it('should call \'post()\' with the correct pathname, params and data', () => { it('should make a POST request to Github with the correct pathname, params and data', () => {
githubApi.post.and.callFake(() => Promise.resolve());
prs.addComment(42, 'body'); prs.addComment(42, 'body');
expect(githubApi.post).toHaveBeenCalledWith('/repos/foo/bar/issues/42/comments', null, {body: 'body'});
expect(prs.post).toHaveBeenCalledWith('/repos/foo/bar/issues/42/comments', null, {body: 'body'});
}); });
it('should reject if the request fails', done => { it('should reject if the request fails', done => {
githubApi.post.and.callFake(() => Promise.reject('Test'));
prs.addComment(42, 'body').catch(err => { prs.addComment(42, 'body').catch(err => {
expect(err).toBe('Test'); expect(err).toBe('Test');
done(); done();
}); });
deferred.reject('Test');
}); });
it('should resolve with the returned response', done => { it('should resolve with the data from the Github POST', done => {
githubApi.post.and.callFake(() => Promise.resolve('Test'));
prs.addComment(42, 'body').then(data => { prs.addComment(42, 'body').then(data => {
expect(data as any).toBe('Test'); expect(data).toBe('Test');
done(); done();
}); });
deferred.resolve('Test');
}); });
}); });
@ -78,35 +75,34 @@ describe('GithubPullRequests', () => {
describe('fetch()', () => { describe('fetch()', () => {
let prs: GithubPullRequests; let prs: GithubPullRequests;
let prsGetSpy: jasmine.Spy;
beforeEach(() => { beforeEach(() => {
prs = new GithubPullRequests('12345', 'foo/bar'); prs = new GithubPullRequests(githubApi, 'foo', 'bar');
prsGetSpy = spyOn(prs as any, 'get');
}); });
it('should call \'get()\' with the correct pathname', () => { it('should make a GET request to GitHub with the correct pathname', () => {
prs.fetch(42); prs.fetch(42);
expect(prsGetSpy).toHaveBeenCalledWith('/repos/foo/bar/issues/42'); expect(githubApi.get).toHaveBeenCalledWith('/repos/foo/bar/issues/42');
}); });
it('should forward the value returned by \'get()\'', () => { it('should resolve with the data returned from GitHub', done => {
prsGetSpy.and.returnValue('Test'); const expected: any = {number: 42};
expect(prs.fetch(42) as any).toBe('Test'); githubApi.get.and.callFake(() => Promise.resolve(expected));
prs.fetch(42).then(data => {
expect(data).toEqual(expected);
done();
});
}); });
}); });
describe('fetchAll()', () => { describe('fetchAll()', () => {
let prs: GithubPullRequests; let prs: GithubPullRequests;
let prsGetPaginatedSpy: jasmine.Spy;
beforeEach(() => { beforeEach(() => {
prs = new GithubPullRequests('12345', 'foo/bar'); prs = new GithubPullRequests(githubApi, 'foo', 'bar');
prsGetPaginatedSpy = spyOn(prs as any, 'getPaginated');
spyOn(console, 'log'); spyOn(console, 'log');
}); });
@ -118,24 +114,48 @@ describe('GithubPullRequests', () => {
prs.fetchAll('closed'); prs.fetchAll('closed');
prs.fetchAll('open'); prs.fetchAll('open');
expect(prsGetPaginatedSpy).toHaveBeenCalledTimes(3); expect(githubApi.getPaginated).toHaveBeenCalledTimes(3);
expect(prsGetPaginatedSpy.calls.argsFor(0)).toEqual([expectedPathname, {state: 'all'}]); expect(githubApi.getPaginated.calls.argsFor(0)).toEqual([expectedPathname, {state: 'all'}]);
expect(prsGetPaginatedSpy.calls.argsFor(1)).toEqual([expectedPathname, {state: 'closed'}]); expect(githubApi.getPaginated.calls.argsFor(1)).toEqual([expectedPathname, {state: 'closed'}]);
expect(prsGetPaginatedSpy.calls.argsFor(2)).toEqual([expectedPathname, {state: 'open'}]); expect(githubApi.getPaginated.calls.argsFor(2)).toEqual([expectedPathname, {state: 'open'}]);
}); });
it('should default to \'all\' if no state is specified', () => { it('should default to \'all\' if no state is specified', () => {
prs.fetchAll(); prs.fetchAll();
expect(prsGetPaginatedSpy).toHaveBeenCalledWith('/repos/foo/bar/pulls', {state: 'all'}); expect(githubApi.getPaginated).toHaveBeenCalledWith('/repos/foo/bar/pulls', {state: 'all'});
}); });
it('should forward the value returned by \'getPaginated()\'', () => { it('should forward the value returned by \'getPaginated()\'', () => {
prsGetPaginatedSpy.and.returnValue('Test'); githubApi.getPaginated.and.returnValue('Test');
expect(prs.fetchAll() as any).toBe('Test'); expect(prs.fetchAll() as any).toBe('Test');
}); });
}); });
describe('fetchFiles()', () => {
let prs: GithubPullRequests;
beforeEach(() => {
prs = new GithubPullRequests(githubApi, 'foo', 'bar');
});
it('should make a GET request to GitHub with the correct pathname', () => {
prs.fetchFiles(42);
expect(githubApi.get).toHaveBeenCalledWith('/repos/foo/bar/pulls/42/files');
});
it('should resolve with the data returned from GitHub', done => {
const expected: any = [{ sha: 'ABCDE', filename: 'a/b/c'}, { sha: '12345', filename: 'x/y/z' }];
githubApi.get.and.callFake(() => Promise.resolve(expected));
prs.fetchFiles(42).then(data => {
expect(data).toEqual(expected);
done();
});
});
});
}); });
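
The new fetchFiles() tests above only require a thin delegation to the shared GithubApi wrapper. A hedged sketch of a class with that behaviour (trimmed to the one method; the real GithubPullRequests class has more):

class GithubPullRequestsSketch {
  constructor(private api: {get(pathname: string): Promise<any>},
              private githubOrg: string,
              private githubRepo: string) {}

  // Resolves with GitHub's list of changed files for the given PR.
  fetchFiles(prNumber: number): Promise<{sha: string, filename: string}[]> {
    return this.api.get(`/repos/${this.githubOrg}/${this.githubRepo}/pulls/${prNumber}/files`);
  }
}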

View File

@ -1,43 +1,40 @@
// Imports import {GithubApi} from '../../lib/common/github-api';
import {GithubTeams} from '../../lib/common/github-teams'; import {GithubTeams} from '../../lib/common/github-teams';
// Tests // Tests
describe('GithubTeams', () => { describe('GithubTeams', () => {
let githubApi: jasmine.SpyObj<GithubApi>;
beforeEach(() => {
githubApi = jasmine.createSpyObj('githubApi', ['post', 'get', 'getPaginated']);
});
describe('constructor()', () => { describe('constructor()', () => {
it('should throw if \'githubToken\' is missing or empty', () => { it('should throw if \'githubOrg\' is missing or empty', () => {
expect(() => new GithubTeams('', 'org')). expect(() => new GithubTeams(githubApi, '')).
toThrowError('Missing or empty required parameter \'githubToken\'!'); toThrowError('Missing or empty required parameter \'githubOrg\'!');
}); });
it('should throw if \'organization\' is missing or empty', () => {
expect(() => new GithubTeams('12345', '')).
toThrowError('Missing or empty required parameter \'organization\'!');
});
}); });
describe('fetchAll()', () => { describe('fetchAll()', () => {
let teams: GithubTeams; let teams: GithubTeams;
let teamsGetPaginatedSpy: jasmine.Spy;
beforeEach(() => { beforeEach(() => {
teams = new GithubTeams('12345', 'foo'); teams = new GithubTeams(githubApi, 'foo');
teamsGetPaginatedSpy = spyOn(teams as any, 'getPaginated');
}); });
it('should call \'getPaginated()\' with the correct pathname and params', () => { it('should call \'getPaginated()\' with the correct pathname and params', () => {
teams.fetchAll(); teams.fetchAll();
expect(teamsGetPaginatedSpy).toHaveBeenCalledWith('/orgs/foo/teams'); expect(githubApi.getPaginated).toHaveBeenCalledWith('/orgs/foo/teams');
}); });
it('should forward the value returned by \'getPaginated()\'', () => { it('should forward the value returned by \'getPaginated()\'', () => {
teamsGetPaginatedSpy.and.returnValue('Test'); githubApi.getPaginated.and.returnValue('Test');
expect(teams.fetchAll() as any).toBe('Test'); expect(teams.fetchAll() as any).toBe('Test');
}); });
@ -46,19 +43,15 @@ describe('GithubTeams', () => {
describe('isMemberById()', () => { describe('isMemberById()', () => {
let teams: GithubTeams; let teams: GithubTeams;
let teamsGetSpy: jasmine.Spy;
beforeEach(() => { beforeEach(() => {
teams = new GithubTeams('12345', 'foo'); teams = new GithubTeams(githubApi, 'foo');
teamsGetSpy = spyOn(teams, 'get').and.returnValue(Promise.resolve(null));
}); });
it('should return a promise', done => { it('should return a promise', () => {
githubApi.get.and.callFake(() => Promise.resolve());
const promise = teams.isMemberById('user', [1]); const promise = teams.isMemberById('user', [1]);
promise.then(done); // Do not complete the test (and release the spies) synchronously
// to avoid running the actual `get()`.
expect(promise).toEqual(jasmine.any(Promise)); expect(promise).toEqual(jasmine.any(Promise));
}); });
@ -66,42 +59,43 @@ describe('GithubTeams', () => {
it('should resolve with false if called with an empty array', done => { it('should resolve with false if called with an empty array', done => {
teams.isMemberById('user', []).then(isMember => { teams.isMemberById('user', []).then(isMember => {
expect(isMember).toBe(false); expect(isMember).toBe(false);
expect(teamsGetSpy).not.toHaveBeenCalled(); expect(githubApi.get).not.toHaveBeenCalled();
done(); done();
}); });
}); });
it('should call \'get()\' with the correct pathname', done => { it('should call \'get()\' with the correct pathname', done => {
githubApi.get.and.callFake(() => Promise.resolve());
teams.isMemberById('user', [1]).then(() => { teams.isMemberById('user', [1]).then(() => {
expect(teamsGetSpy).toHaveBeenCalledWith('/teams/1/memberships/user'); expect(githubApi.get).toHaveBeenCalledWith('/teams/1/memberships/user');
done(); done();
}); });
}); });
it('should resolve with false if \'get()\' rejects', done => { it('should resolve with false if \'get()\' rejects', done => {
teamsGetSpy.and.returnValue(Promise.reject(null)); githubApi.get.and.callFake(() => Promise.reject(null));
teams.isMemberById('user', [1]).then(isMember => { teams.isMemberById('user', [1]).then(isMember => {
expect(isMember).toBe(false); expect(isMember).toBe(false);
expect(teamsGetSpy).toHaveBeenCalled(); expect(githubApi.get).toHaveBeenCalled();
done(); done();
}); });
}); });
it('should resolve with false if the membership is not active', done => { it('should resolve with false if the membership is not active', done => {
teamsGetSpy.and.returnValue(Promise.resolve({state: 'pending'})); githubApi.get.and.callFake(() => Promise.resolve({state: 'pending'}));
teams.isMemberById('user', [1]).then(isMember => { teams.isMemberById('user', [1]).then(isMember => {
expect(isMember).toBe(false); expect(isMember).toBe(false);
expect(teamsGetSpy).toHaveBeenCalled(); expect(githubApi.get).toHaveBeenCalled();
done(); done();
}); });
}); });
it('should resolve with true if the membership is active', done => { it('should resolve with true if the membership is active', done => {
teamsGetSpy.and.returnValue(Promise.resolve({state: 'active'})); githubApi.get.and.callFake(() => Promise.resolve({state: 'active'}));
teams.isMemberById('user', [1]).then(isMember => { teams.isMemberById('user', [1]).then(isMember => {
expect(isMember).toBe(true); expect(isMember).toBe(true);
done(); done();
@ -115,15 +109,15 @@ describe('GithubTeams', () => {
'/teams/2/memberships/user': Promise.reject(null), '/teams/2/memberships/user': Promise.reject(null),
'/teams/3/memberships/user': Promise.resolve({state: 'active'}), '/teams/3/memberships/user': Promise.resolve({state: 'active'}),
}; };
teamsGetSpy.and.callFake((pathname: string) => trainedResponses[pathname]); githubApi.get.and.callFake((pathname: string) => trainedResponses[pathname]);
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => { teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
expect(isMember).toBe(true); expect(isMember).toBe(true);
-expect(teamsGetSpy).toHaveBeenCalledTimes(3);
+expect(githubApi.get).toHaveBeenCalledTimes(3);
-expect(teamsGetSpy.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
+expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
-expect(teamsGetSpy.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
+expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
-expect(teamsGetSpy.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
+expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
done();
});
@ -137,16 +131,16 @@ describe('GithubTeams', () => {
'/teams/3/memberships/user': Promise.resolve({state: 'not active'}),
'/teams/4/memberships/user': Promise.reject(null),
};
-teamsGetSpy.and.callFake((pathname: string) => trainedResponses[pathname]);
+githubApi.get.and.callFake((pathname: string) => trainedResponses[pathname]);
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
expect(isMember).toBe(false);
-expect(teamsGetSpy).toHaveBeenCalledTimes(4);
+expect(githubApi.get).toHaveBeenCalledTimes(4);
-expect(teamsGetSpy.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
+expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
-expect(teamsGetSpy.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
+expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
-expect(teamsGetSpy.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
+expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
-expect(teamsGetSpy.calls.argsFor(3)[0]).toBe('/teams/4/memberships/user');
+expect(githubApi.get.calls.argsFor(3)[0]).toBe('/teams/4/memberships/user');
done();
});
@ -161,7 +155,7 @@ describe('GithubTeams', () => {
let teamsIsMemberByIdSpy: jasmine.Spy;
beforeEach(() => {
-teams = new GithubTeams('12345', 'foo');
+teams = new GithubTeams(githubApi, 'foo');
const mockResponse = Promise.resolve([{id: 1, slug: 'team1'}, {id: 2, slug: 'team2'}]);
teamsFetchAllSpy = spyOn(teams, 'fetchAll').and.returnValue(mockResponse);
@ -181,7 +175,7 @@ describe('GithubTeams', () => {
it('should resolve with false if \'fetchAll()\' rejects', done => {
-teamsFetchAllSpy.and.returnValue(Promise.reject(null));
+teamsFetchAllSpy.and.callFake(() => Promise.reject(null));
teams.isMemberBySlug('user', ['team-slug']).then(isMember => {
expect(isMember).toBe(false);
done();
@ -209,7 +203,7 @@ describe('GithubTeams', () => {
it('should resolve with false if \'isMemberById()\' rejects', done => {
-teamsIsMemberByIdSpy.and.returnValue(Promise.reject(null));
+teamsIsMemberByIdSpy.and.callFake(() => Promise.reject(null));
teams.isMemberBySlug('user', ['team1']).then(isMember => {
expect(isMember).toBe(false);
expect(teamsIsMemberByIdSpy).toHaveBeenCalled();
@ -218,16 +212,17 @@ describe('GithubTeams', () => {
});
-it('should resolve with the value \'isMemberById()\' resolves with', done => {
-teamsIsMemberByIdSpy.and.returnValues(Promise.resolve(false), Promise.resolve(true));
-Promise.all([
-teams.isMemberBySlug('user', ['team1']).then(isMember => expect(isMember).toBe(false)),
-teams.isMemberBySlug('user', ['team1']).then(isMember => expect(isMember).toBe(true)),
-]).then(() => {
-expect(teamsIsMemberByIdSpy).toHaveBeenCalledTimes(2);
-done();
-});
+it('should resolve with the value \'isMemberById()\' resolves with', async () => {
+teamsIsMemberByIdSpy.and.callFake(() => Promise.resolve(true));
+const isMember1 = await teams.isMemberBySlug('user', ['team1']);
+expect(isMember1).toBe(true);
+expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', [1]);
+teamsIsMemberByIdSpy.and.callFake(() => Promise.resolve(false));
+const isMember2 = await teams.isMemberBySlug('user', ['team1']);
+expect(isMember2).toBe(false);
+expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', [1]);
});
});

View File

@ -1,9 +1,53 @@
// Imports
-import {assertNotMissingOrEmpty, getEnvVar} from '../../lib/common/utils';
+import {
+assert,
+assertNotMissingOrEmpty,
+computeArtifactDownloadPath,
+computeShortSha,
+getEnvVar,
+getPrInfoFromDownloadPath,
+} from '../../lib/common/utils';
// Tests
describe('utils', () => {
describe('computeShortSha', () => {
it('should return only the first SHORT_SHA_LEN characters of the SHA', () => {
expect(computeShortSha('0123456789')).toEqual('0123456');
expect(computeShortSha('ABC')).toEqual('ABC');
expect(computeShortSha('')).toEqual('');
});
});
describe('assert', () => {
it('should throw if passed a false value', () => {
expect(() => assert(false, 'error message')).toThrowError('error message');
});
it('should not throw if passed a true value', () => {
expect(() => assert(true, 'error message')).not.toThrow();
});
});
describe('computeArtifactDownloadPath', () => {
it('should compute an absolute path based on the artifact info provided', () => {
const downloadDir = '/a/b/c';
const pr = 123;
const sha = 'ABCDEF1234567';
const artifactPath = 'a/path/to/file.zip';
const path = computeArtifactDownloadPath(downloadDir, pr, sha, artifactPath);
expect(path).toEqual('/a/b/c/123-ABCDEF1-file.zip');
});
});
describe('getPrInfoFromDownloadPath', () => {
it('should extract the PR and SHA from the file path', () => {
const {pr, sha} = getPrInfoFromDownloadPath('a/b/c/12345-ABCDE-artifact.zip');
expect(pr).toEqual(12345);
expect(sha).toEqual('ABCDE');
});
});
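The expectations above pin down the new path helpers. For orientation, here is a minimal sketch that is consistent with these tests, assuming `SHORT_SHA_LEN` is 7 (as the `computeShortSha` test implies); it is not the actual `lib/common/utils` implementation:

```ts
import * as path from 'path';

// Assumed from the test '0123456789' -> '0123456'; the real constant lives in lib/common/constants.
const SHORT_SHA_LEN = 7;

export function computeShortSha(sha: string): string {
  return sha.substr(0, SHORT_SHA_LEN);
}

// E.g. ('/a/b/c', 123, 'ABCDEF1234567', 'a/path/to/file.zip') -> '/a/b/c/123-ABCDEF1-file.zip'.
export function computeArtifactDownloadPath(downloadDir: string, pr: number, sha: string, artifactPath: string): string {
  return path.join(downloadDir, `${pr}-${computeShortSha(sha)}-${path.basename(artifactPath)}`);
}

// E.g. 'a/b/c/12345-ABCDE-artifact.zip' -> {pr: 12345, sha: 'ABCDE'}.
export function getPrInfoFromDownloadPath(downloadPath: string): {pr: number, sha: string} {
  const [pr, sha] = path.basename(downloadPath).split('-');
  return {pr: +pr, sha};
}
```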
describe('assertNotMissingOrEmpty()', () => {
it('should throw if passed an empty value', () => {

View File

@ -5,20 +5,20 @@ import * as fs from 'fs';
import * as path from 'path';
import * as shell from 'shelljs';
import {SHORT_SHA_LEN} from '../../lib/common/constants';
-import {BuildCreator} from '../../lib/upload-server/build-creator';
+import {BuildCreator} from '../../lib/preview-server/build-creator';
-import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/preview-server/build-events';
-import {UploadError} from '../../lib/upload-server/upload-error';
+import {PreviewServerError} from '../../lib/preview-server/preview-error';
-import {expectToBeUploadError} from './helpers';
+import {expectToBePreviewServerError} from './helpers';
// Tests
describe('BuildCreator', () => {
-const pr = '9';
+const pr = 9;
const sha = '9'.repeat(40);
const shortSha = sha.substr(0, SHORT_SHA_LEN);
const archive = 'snapshot.tar.gz';
const buildsDir = 'builds/dir';
const hiddenPrDir = path.join(buildsDir, `hidden--${pr}`);
-const publicPrDir = path.join(buildsDir, pr);
+const publicPrDir = path.join(buildsDir, `${pr}`);
const hiddenShaDir = path.join(hiddenPrDir, shortSha);
const publicShaDir = path.join(publicPrDir, shortSha);
let bc: BuildCreator;
@ -134,8 +134,8 @@ describe('BuildCreator', () => {
it('should abort and skip further operations if changing the PR\'s visibility fails', done => {
-const mockError = new UploadError(543, 'Test');
+const mockError = new PreviewServerError(543, 'Test');
-bcUpdatePrVisibilitySpy.and.returnValue(Promise.reject(mockError));
+bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject(mockError));
bc.create(pr, sha, archive, isPublic).catch(err => {
expect(err).toBe(mockError);
@ -154,7 +154,7 @@ describe('BuildCreator', () => {
existsValues[shaDir] = true;
bc.create(pr, sha, archive, isPublic).catch(err => {
const publicOrNot = isPublic ? 'public' : 'non-public';
-expectToBeUploadError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
+expectToBePreviewServerError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
expect(shellMkdirSpy).not.toHaveBeenCalled();
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
expect(bcEmitSpy).not.toHaveBeenCalled();
@ -171,7 +171,7 @@ describe('BuildCreator', () => {
bc.create(pr, sha, archive, isPublic).catch(err => {
const publicOrNot = isPublic ? 'public' : 'non-public';
-expectToBeUploadError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
+expectToBePreviewServerError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
expect(shellMkdirSpy).not.toHaveBeenCalled();
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
expect(bcEmitSpy).not.toHaveBeenCalled();
@ -222,20 +222,20 @@ describe('BuildCreator', () => {
});
-it('should reject with an UploadError', done => {
+it('should reject with an PreviewServerError', done => {
// tslint:disable-next-line: no-string-throw
shellMkdirSpy.and.callFake(() => { throw 'Test'; });
bc.create(pr, sha, archive, isPublic).catch(err => {
-expectToBeUploadError(err, 500, `Error while uploading to directory: ${shaDir}\nTest`);
+expectToBePreviewServerError(err, 500, `Error while creating preview at: ${shaDir}\nTest`);
done();
});
});
-it('should pass UploadError instances unmodified', done => {
+it('should pass PreviewServerError instances unmodified', done => {
-shellMkdirSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
+shellMkdirSpy.and.callFake(() => { throw new PreviewServerError(543, 'Test'); });
bc.create(pr, sha, archive, isPublic).catch(err => {
-expectToBeUploadError(err, 543, 'Test');
+expectToBePreviewServerError(err, 543, 'Test');
done();
});
});
@ -324,7 +324,7 @@ describe('BuildCreator', () => {
const shas = ['foo', 'bar', 'baz'];
let emitted = false;
-bcListShasByDate.and.returnValue(Promise.resolve(shas));
+bcListShasByDate.and.callFake(() => Promise.resolve(shas));
bcEmitSpy.and.callFake((type: string, evt: ChangedPrVisibilityEvent) => {
expect(bcListShasByDate).toHaveBeenCalledWith(newPrDir);
@ -376,7 +376,8 @@ describe('BuildCreator', () => {
it('should abort and skip further operations if both directories exist', done => {
bcExistsSpy.and.returnValue(true);
bc.updatePrVisibility(pr, makePublic).catch(err => {
-expectToBeUploadError(err, 409, `Request to move '${oldPrDir}' to existing directory '${newPrDir}'.`);
+expectToBePreviewServerError(err, 409,
+`Request to move '${oldPrDir}' to existing directory '${newPrDir}'.`);
expect(shellMvSpy).not.toHaveBeenCalled();
expect(bcListShasByDate).not.toHaveBeenCalled();
expect(bcEmitSpy).not.toHaveBeenCalled();
@ -407,20 +408,21 @@ describe('BuildCreator', () => {
});
-it('should reject with an UploadError', done => {
+it('should reject with an PreviewServerError', done => {
// tslint:disable-next-line: no-string-throw
shellMvSpy.and.callFake(() => { throw 'Test'; });
bc.updatePrVisibility(pr, makePublic).catch(err => {
-expectToBeUploadError(err, 500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\nTest`);
+expectToBePreviewServerError(err, 500,
+`Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\nTest`);
done();
});
});
-it('should pass UploadError instances unmodified', done => {
+it('should pass PreviewServerError instances unmodified', done => {
-shellMvSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
+shellMvSpy.and.callFake(() => { throw new PreviewServerError(543, 'Test'); });
bc.updatePrVisibility(pr, makePublic).catch(err => {
-expectToBeUploadError(err, 543, 'Test');
+expectToBePreviewServerError(err, 543, 'Test');
done();
});
});
@ -451,7 +453,7 @@ describe('BuildCreator', () => {
it('should call \'fs.access()\' with the specified argument', () => {
(bc as any).exists('foo');
-expect(fs.access).toHaveBeenCalledWith('foo', jasmine.any(Function));
+expect(fsAccessSpy).toHaveBeenCalledWith('foo', jasmine.any(Function));
});
@ -511,7 +513,8 @@ describe('BuildCreator', () => {
it('should log (as a warning) any stderr output if extracting succeeded', done => {
(bc as any).extractArchive('foo', 'bar').
-then(() => expect(consoleWarnSpy).toHaveBeenCalledWith('This is stderr')).
+then(() => expect(consoleWarnSpy)
+.toHaveBeenCalledWith(jasmine.any(String), 'BuildCreator: ', 'This is stderr')).
then(done);
cpExecCbs[0](null, 'This is stdout', 'This is stderr');
@ -527,7 +530,7 @@ describe('BuildCreator', () => {
});
-it('should delete the uploaded file on success', done => {
+it('should delete the build artifact file on success', done => {
(bc as any).extractArchive('input/file', 'output/dir').
then(() => expect(shellRmSpy).toHaveBeenCalledWith('-f', 'input/file')).
then(done);
@ -567,7 +570,7 @@ describe('BuildCreator', () => {
});
-it('should abort and reject if it fails to remove the uploaded file', done => {
+it('should abort and reject if it fails to remove the build artifact file', done => {
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
expect(shellChmodSpy).toHaveBeenCalled();
expect(shellRmSpy).toHaveBeenCalled();
@ -618,7 +621,7 @@ describe('BuildCreator', () => {
it('should reject if listing files fails', done => {
-shellLsSpy.and.returnValue(Promise.reject('Test'));
+shellLsSpy.and.callFake(() => Promise.reject('Test'));
(bc as any).listShasByDate('input/dir').catch((err: string) => {
expect(err).toBe('Test');
done();
@ -627,7 +630,7 @@ describe('BuildCreator', () => {
it('should return the filenames', done => {
-shellLsSpy.and.returnValue(Promise.resolve([
+shellLsSpy.and.callFake(() => Promise.resolve([
lsResult('foo', 100),
lsResult('bar', 200),
lsResult('baz', 300),
@ -640,7 +643,7 @@ describe('BuildCreator', () => {
it('should sort by date', done => {
-shellLsSpy.and.returnValue(Promise.resolve([
+shellLsSpy.and.callFake(() => Promise.resolve([
lsResult('foo', 300),
lsResult('bar', 100),
lsResult('baz', 200),
@ -660,7 +663,7 @@ describe('BuildCreator', () => {
];
mockArray.sort = jasmine.createSpy('sort');
-shellLsSpy.and.returnValue(Promise.resolve(mockArray));
+shellLsSpy.and.callFake(() => Promise.resolve(mockArray));
(bc as any).listShasByDate('input/dir').
then((shas: string[]) => {
expect(shas).toEqual(['bar', 'baz', 'foo']);
@ -671,7 +674,7 @@ describe('BuildCreator', () => {
it('should only include directories', done => {
-shellLsSpy.and.returnValue(Promise.resolve([
+shellLsSpy.and.callFake(() => Promise.resolve([
lsResult('foo', 100),
lsResult('bar', 200, false),
lsResult('baz', 300),

View File

@ -1,5 +1,5 @@
// Imports
-import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/preview-server/build-events';
// Tests
describe('ChangedPrVisibilityEvent', () => {

View File

@ -0,0 +1,191 @@
import * as fs from 'fs';
import * as nock from 'nock';
import {BuildInfo, CircleCiApi} from '../../lib/common/circle-ci-api';
import {BuildRetriever} from '../../lib/preview-server/build-retriever';
describe('BuildRetriever', () => {
const MAX_DOWNLOAD_SIZE = 10000;
const DOWNLOAD_DIR = '/DOWNLOAD/DIR';
const BASE_URL = 'http://test.com';
const ARTIFACT_PATH = '/some/path/build.zip';
let api: CircleCiApi;
let BUILD_INFO: BuildInfo;
let WRITEFILE_RESULT: any;
let writeFileSpy: jasmine.Spy;
let EXISTS_RESULT: boolean;
let existsSpy: jasmine.Spy;
let getBuildArtifactUrlSpy: jasmine.Spy;
beforeEach(() => {
BUILD_INFO = {
branch: 'pull/777',
build_num: 12345,
failed: false,
has_artifacts: true,
outcome: 'success',
reponame: 'REPO',
username: 'ORG',
vcs_revision: 'COMMIT',
};
spyOn(console, 'log');
spyOn(console, 'warn');
spyOn(console, 'error');
api = new CircleCiApi('ORG', 'REPO', 'TOKEN');
spyOn(api, 'getBuildInfo').and.callFake(() => Promise.resolve(BUILD_INFO));
getBuildArtifactUrlSpy = spyOn(api, 'getBuildArtifactUrl')
.and.callFake(() => Promise.resolve(BASE_URL + ARTIFACT_PATH));
WRITEFILE_RESULT = undefined;
writeFileSpy = spyOn(fs, 'writeFile').and.callFake(
(_path: string, _buffer: Buffer, callback: (err?: any) => {}) => callback(WRITEFILE_RESULT),
);
EXISTS_RESULT = false;
existsSpy = spyOn(fs, 'exists').and.callFake(
(_path: string, callback: (exists: boolean) => {}) => callback(EXISTS_RESULT),
);
});
describe('constructor', () => {
it('should fail if the "downloadSizeLimit" is invalid', () => {
expect(() => new BuildRetriever(api, NaN, DOWNLOAD_DIR))
.toThrowError(`Invalid parameter "downloadSizeLimit" should be a number greater than 0.`);
expect(() => new BuildRetriever(api, 0, DOWNLOAD_DIR))
.toThrowError(`Invalid parameter "downloadSizeLimit" should be a number greater than 0.`);
expect(() => new BuildRetriever(api, -1, DOWNLOAD_DIR))
.toThrowError(`Invalid parameter "downloadSizeLimit" should be a number greater than 0.`);
});
it('should fail if the "downloadDir" is missing', () => {
expect(() => new BuildRetriever(api, MAX_DOWNLOAD_SIZE, ''))
.toThrowError(`Missing or empty required parameter 'downloadDir'!`);
});
});
describe('getGithubInfo', () => {
it('should request the info from CircleCI', async () => {
const retriever = new BuildRetriever(api, MAX_DOWNLOAD_SIZE, DOWNLOAD_DIR);
const info = await retriever.getGithubInfo(12345);
expect(api.getBuildInfo).toHaveBeenCalledWith(12345);
expect(info).toEqual({org: 'ORG', pr: 777, repo: 'REPO', sha: 'COMMIT', success: true});
});
it('should error if it is not possible to extract the PR number from the branch', async () => {
const retriever = new BuildRetriever(api, MAX_DOWNLOAD_SIZE, DOWNLOAD_DIR);
try {
BUILD_INFO.branch = 'master';
await retriever.getGithubInfo(12345);
throw new Error('Exception Expected');
} catch (error) {
expect(error.message).toEqual('No PR found in branch field: master');
}
});
});
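The two tests above imply that the PR number is parsed out of the CircleCI `branch` field. A hypothetical helper illustrating that mapping (the branch format and regex are assumptions, not the actual `BuildRetriever` code):

```ts
// 'pull/777' -> 777; anything without a PR number fails with the message asserted above.
function getPrFromBranch(branch: string): number {
  const match = /^pull\/(\d+)$/.exec(branch);
  if (!match) {
    throw new Error(`No PR found in branch field: ${branch}`);
  }
  return +match[1];
}
```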
describe('downloadBuildArtifact', () => {
const ARTIFACT_CONTENTS = 'ARTIFACT CONTENTS';
let retriever: BuildRetriever;
beforeEach(() => {
retriever = new BuildRetriever(api, MAX_DOWNLOAD_SIZE, DOWNLOAD_DIR);
});
it('should get the artifact URL from the CircleCI API', async () => {
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
expect(api.getBuildArtifactUrl).toHaveBeenCalledWith(12345, ARTIFACT_PATH);
artifactRequest.done();
});
it('should download the artifact from its URL', async () => {
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
// The following line proves that the artifact URL fetch occurred.
artifactRequest.done();
});
it('should fail if the artifact is too large', async () => {
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
retriever = new BuildRetriever(api, 10, DOWNLOAD_DIR);
try {
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
throw new Error('Exception Expected');
} catch (error) {
expect(error.status).toEqual(413);
}
artifactRequest.done();
});
it('should not download the artifact if it already exists', async () => {
const artifactRequestInterceptor = nock(BASE_URL).get(ARTIFACT_PATH);
const artifactRequest = artifactRequestInterceptor.reply(200, ARTIFACT_CONTENTS);
EXISTS_RESULT = true;
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
expect(existsSpy).toHaveBeenCalled();
expect(getBuildArtifactUrlSpy).not.toHaveBeenCalled();
expect(artifactRequest.isDone()).toEqual(false);
nock.removeInterceptor(artifactRequestInterceptor);
});
it('should write the artifact file to disk', async () => {
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
expect(writeFileSpy)
.toHaveBeenCalledWith(`${DOWNLOAD_DIR}/777-COMMIT-build.zip`, jasmine.any(Buffer), jasmine.any(Function));
const buffer: Buffer = writeFileSpy.calls.mostRecent().args[1];
expect(buffer.toString()).toEqual(ARTIFACT_CONTENTS);
artifactRequest.done();
});
it('should fail if the CircleCI API fails', async () => {
try {
getBuildArtifactUrlSpy.and.callFake(() => Promise.reject('getBuildArtifactUrl failed'));
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
throw new Error('Exception Expected');
} catch (error) {
expect(error.message).toEqual('CircleCI artifact download failed (getBuildArtifactUrl failed)');
}
});
it('should fail if the URL fetch errors', async () => {
// create a new handler that errors
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).replyWithError('Artifact Request Failed');
try {
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
throw new Error('Exception Expected');
} catch (error) {
expect(error.message).toEqual('CircleCI artifact download failed ' +
'(request to http://test.com/some/path/build.zip failed, reason: Artifact Request Failed)');
}
artifactRequest.done();
});
it('should fail if the URL fetch 404s', async () => {
// create a new handler that errors
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(404, 'No such artifact');
try {
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
throw new Error('Exception Expected');
} catch (error) {
expect(error.message).toEqual('CircleCI artifact download failed (Error 404 - Not Found)');
}
artifactRequest.done();
});
it('should fail if file write fails', async () => {
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
try {
WRITEFILE_RESULT = 'Test Error';
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
throw new Error('Exception Expected');
} catch (error) {
expect(error.message).toEqual('CircleCI artifact download failed (Test Error)');
}
artifactRequest.done();
});
});
});

View File

@ -0,0 +1,180 @@
// Imports
import {GithubApi} from '../../lib/common/github-api';
import {GithubPullRequests, PullRequest} from '../../lib/common/github-pull-requests';
import {GithubTeams} from '../../lib/common/github-teams';
import {BuildVerifier} from '../../lib/preview-server/build-verifier';
// Tests
describe('BuildVerifier', () => {
const defaultConfig = {
allowedTeamSlugs: ['team1', 'team2'],
githubOrg: 'organization',
githubRepo: 'repo',
githubToken: 'githubToken',
secret: 'secret',
trustedPrLabel: 'trusted: pr-label',
};
let prs: GithubPullRequests;
let bv: BuildVerifier;
// Helpers
const createBuildVerifier = (partialConfig: Partial<typeof defaultConfig> = {}) => {
const cfg = {...defaultConfig, ...partialConfig} as typeof defaultConfig;
const api = new GithubApi(cfg.githubToken);
prs = new GithubPullRequests(api, cfg.githubOrg, cfg.githubRepo);
const teams = new GithubTeams(api, cfg.githubOrg);
return new BuildVerifier(prs, teams, cfg.allowedTeamSlugs, cfg.trustedPrLabel);
};
beforeEach(() => bv = createBuildVerifier());
describe('constructor()', () => {
['githubToken', 'githubRepo', 'githubOrg', 'allowedTeamSlugs', 'trustedPrLabel'].
forEach(param => {
it(`should throw if '${param}' is missing or empty`, () => {
expect(() => createBuildVerifier({[param]: ''})).
toThrowError(`Missing or empty required parameter '${param}'!`);
});
});
it('should throw if \'allowedTeamSlugs\' is an empty array', () => {
expect(() => createBuildVerifier({allowedTeamSlugs: []})).
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
});
});
describe('getSignificantFilesChanged', () => {
it('should return false if none of the fetched files match the given pattern', async () => {
const fetchFilesSpy = spyOn(prs, 'fetchFiles');
fetchFilesSpy.and.callFake(() => Promise.resolve([{filename: 'a/b/c'}, {filename: 'd/e/f'}]));
expect(await bv.getSignificantFilesChanged(777, /^x/)).toEqual(false);
expect(fetchFilesSpy).toHaveBeenCalledWith(777);
fetchFilesSpy.calls.reset();
expect(await bv.getSignificantFilesChanged(777, /^a/)).toEqual(true);
expect(fetchFilesSpy).toHaveBeenCalledWith(777);
});
});
describe('getPrIsTrusted()', () => {
const pr = 9;
let mockPrInfo: PullRequest;
let prsFetchSpy: jasmine.Spy;
let teamsIsMemberBySlugSpy: jasmine.Spy;
beforeEach(() => {
mockPrInfo = {
labels: [
{name: 'foo'},
{name: 'bar'},
],
number: 9,
user: {login: 'username'},
};
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
and.callFake(() => Promise.resolve(mockPrInfo));
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
and.callFake(() => Promise.resolve(true));
});
it('should return a promise', done => {
const promise = bv.getPrIsTrusted(pr);
promise.then(done); // Do not complete the test (and release the spies) synchronously
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
expect(promise).toEqual(jasmine.any(Promise));
});
it('should fetch the corresponding PR', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
done();
});
});
it('should fail if fetching the PR errors', done => {
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
bv.getPrIsTrusted(pr).catch(err => {
expect(err).toBe('Test');
done();
});
});
describe('when the PR has the "trusted PR" label', () => {
beforeEach(() => mockPrInfo.labels.push({name: 'trusted: pr-label'}));
it('should resolve to true', done => {
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(true);
done();
});
});
it('should not try to verify the author\'s membership status', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(teamsIsMemberBySlugSpy).not.toHaveBeenCalled();
done();
});
});
});
describe('when the PR does not have the "trusted PR" label', () => {
it('should verify the PR author\'s membership in the specified teams', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
done();
});
});
it('should fail if verifying membership errors', done => {
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
bv.getPrIsTrusted(pr).catch(err => {
expect(err).toBe('Test');
done();
});
});
it('should resolve to true if the PR\'s author is a member', done => {
teamsIsMemberBySlugSpy.and.callFake(() => Promise.resolve(true));
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(true);
done();
});
});
it('should resolve to false if the PR\'s author is not a member', done => {
teamsIsMemberBySlugSpy.and.callFake(() => Promise.resolve(false));
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(false);
done();
});
});
});
});
});

View File

@ -0,0 +1,11 @@
import {PreviewServerError} from '../../lib/preview-server/preview-error';
export const expectToBePreviewServerError = (actual: PreviewServerError, status?: number, message?: string) => {
expect(actual).toEqual(jasmine.any(PreviewServerError));
if (status != null) {
expect(actual.status).toBe(status);
}
if (message != null) {
expect(actual.message).toBe(message);
}
};

View File

@ -0,0 +1,39 @@
// Imports
import {PreviewServerError} from '../../lib/preview-server/preview-error';
// Tests
describe('PreviewServerError', () => {
let err: PreviewServerError;
beforeEach(() => err = new PreviewServerError(999, 'message'));
it('should extend Error', () => {
expect(err).toEqual(jasmine.any(PreviewServerError));
expect(err).toEqual(jasmine.any(Error));
expect(Object.getPrototypeOf(err)).toBe(PreviewServerError.prototype);
});
it('should have a \'status\' property', () => {
expect(err.status).toBe(999);
});
it('should have a \'message\' property', () => {
expect(err.message).toBe('message');
});
it('should have a 500 \'status\' by default', () => {
expect(new PreviewServerError().status).toBe(500);
});
it('should have an empty \'message\' by default', () => {
expect(new PreviewServerError().message).toBe('');
expect(new PreviewServerError(999).message).toBe('');
});
});
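For reference, a minimal class that satisfies every expectation above (default status 500, default empty message, prototype restored after extending `Error`); a sketch, not necessarily the exact `lib/preview-server/preview-error` source:

```ts
export class PreviewServerError extends Error {
  constructor(public status: number = 500, message: string = '') {
    super(message);
    // Restore the prototype chain, so `instanceof`/`getPrototypeOf` checks hold even when targeting ES5.
    Object.setPrototypeOf(this, PreviewServerError.prototype);
  }
}
```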

View File

@ -0,0 +1,600 @@
// Imports
import * as express from 'express';
import * as http from 'http';
import * as supertest from 'supertest';
import {promisify} from 'util';
import {CircleCiApi} from '../../lib/common/circle-ci-api';
import {GithubApi} from '../../lib/common/github-api';
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
import {GithubTeams} from '../../lib/common/github-teams';
import {BuildCreator} from '../../lib/preview-server/build-creator';
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/preview-server/build-events';
import {BuildRetriever, GithubInfo} from '../../lib/preview-server/build-retriever';
import {BuildVerifier} from '../../lib/preview-server/build-verifier';
import {PreviewServerConfig, PreviewServerFactory} from '../../lib/preview-server/preview-server-factory';
interface CircleCiWebHookPayload {
payload: {
build_num: number;
build_parameters: {
CIRCLE_JOB: string;
}
};
}
// Tests
describe('PreviewServerFactory', () => {
const defaultConfig: PreviewServerConfig = {
buildArtifactPath: 'artifact/path.zip',
buildsDir: 'builds/dir',
circleCiToken: 'CIRCLE_CI_TOKEN',
domainName: 'domain.name',
downloadSizeLimit: 999,
downloadsDir: '/tmp/aio-create-builds',
githubOrg: 'organisation',
githubRepo: 'repo',
githubTeamSlugs: ['team1', 'team2'],
githubToken: '12345',
significantFilesPattern: '^(?:aio|packages)\\/(?!.*[._]spec\\.[jt]s$)',
trustedPrLabel: 'trusted: pr-label',
};
// Helpers
const createPreviewServer = (partialConfig: Partial<PreviewServerConfig> = {}) =>
PreviewServerFactory.create({...defaultConfig, ...partialConfig});
beforeEach(() => {
spyOn(console, 'error');
spyOn(console, 'info');
spyOn(console, 'log');
});
describe('create()', () => {
let usfCreateMiddlewareSpy: jasmine.Spy;
beforeEach(() => {
usfCreateMiddlewareSpy = spyOn(PreviewServerFactory, 'createMiddleware').and.callThrough();
});
it('should throw if \'buildsDir\' is missing or empty', () => {
expect(() => createPreviewServer({buildsDir: ''})).
toThrowError('Missing or empty required parameter \'buildsDir\'!');
});
it('should throw if \'domainName\' is missing or empty', () => {
expect(() => createPreviewServer({domainName: ''})).
toThrowError('Missing or empty required parameter \'domainName\'!');
});
it('should throw if \'githubToken\' is missing or empty', () => {
expect(() => createPreviewServer({githubToken: ''})).
toThrowError('Missing or empty required parameter \'githubToken\'!');
});
it('should throw if \'githubOrg\' is missing or empty', () => {
expect(() => createPreviewServer({githubOrg: ''})).
toThrowError('Missing or empty required parameter \'githubOrg\'!');
});
it('should throw if \'githubTeamSlugs\' is missing or empty', () => {
expect(() => createPreviewServer({githubTeamSlugs: []})).
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
});
it('should throw if \'githubRepo\' is missing or empty', () => {
expect(() => createPreviewServer({githubRepo: ''})).
toThrowError('Missing or empty required parameter \'githubRepo\'!');
});
it('should throw if \'trustedPrLabel\' is missing or empty', () => {
expect(() => createPreviewServer({trustedPrLabel: ''})).
toThrowError('Missing or empty required parameter \'trustedPrLabel\'!');
});
it('should return an http.Server', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
const server = createPreviewServer();
expect(server).toBe(httpCreateServerSpy.calls.mostRecent().returnValue);
});
it('should create and use an appropriate BuildCreator', () => {
const usfCreateBuildCreatorSpy = spyOn(PreviewServerFactory, 'createBuildCreator').and.callThrough();
createPreviewServer();
const buildRetriever = jasmine.any(BuildRetriever);
const buildVerifier = jasmine.any(BuildVerifier);
const prs = jasmine.any(GithubPullRequests);
const buildCreator: BuildCreator = usfCreateBuildCreatorSpy.calls.mostRecent().returnValue;
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(buildRetriever, buildVerifier, buildCreator, defaultConfig);
expect(usfCreateBuildCreatorSpy).toHaveBeenCalledWith(prs, 'builds/dir', 'domain.name');
});
it('should create and use an appropriate middleware', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
createPreviewServer();
const buildRetriever = jasmine.any(BuildRetriever);
const buildVerifier = jasmine.any(BuildVerifier);
const buildCreator = jasmine.any(BuildCreator);
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(buildRetriever, buildVerifier, buildCreator, defaultConfig);
const middleware: express.Express = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
expect(httpCreateServerSpy).toHaveBeenCalledWith(middleware);
});
it('should log the server address info on \'listening\'', () => {
const server = createPreviewServer();
server.address = () => ({address: 'foo', family: '', port: 1337});
expect(console.info).not.toHaveBeenCalled();
server.emit('listening');
expect(console.info).toHaveBeenCalledWith(
jasmine.any(String), 'PreviewServer: ', 'Up and running (and listening on foo:1337)...');
});
});
// Protected methods
describe('createBuildCreator()', () => {
let buildCreator: BuildCreator;
beforeEach(() => {
const api = new GithubApi(defaultConfig.githubToken);
const prs = new GithubPullRequests(api, defaultConfig.githubOrg, defaultConfig.githubRepo);
buildCreator = PreviewServerFactory.createBuildCreator(prs, defaultConfig.buildsDir, defaultConfig.domainName);
});
it('should pass the \'buildsDir\' to the BuildCreator', () => {
expect((buildCreator as any).buildsDir).toBe('builds/dir');
});
describe('on \'build.created\'', () => {
let prsAddCommentSpy: jasmine.Spy;
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
it('should post a comment on GitHub for public previews', () => {
const commentBody = 'You can preview 1234567890 at https://pr42-1234567890.domain.name/.';
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
});
it('should not post a comment on GitHub for non-public previews', () => {
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: false});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
});
describe('on \'pr.changedVisibility\'', () => {
let prsAddCommentSpy: jasmine.Spy;
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
it('should post a comment on GitHub (for all SHAs) for PRs made public', () => {
const commentBody = 'You can preview 12345 at https://pr42-12345.domain.name/.\n' +
'You can preview 67890 at https://pr42-67890.domain.name/.';
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
});
it('should not post a comment on GitHub if no SHAs were affected', () => {
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: [], isPublic: true});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
it('should not post a comment on GitHub for PRs made non-public', () => {
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: false});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
});
it('should pass the correct parameters to GithubPullRequests', () => {
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
const allCalls = prsAddCommentSpy.calls.all();
const prs: GithubPullRequests = allCalls[0].object;
expect(prsAddCommentSpy).toHaveBeenCalledTimes(2);
expect(prs).toBe(allCalls[1].object);
expect(prs).toEqual(jasmine.any(GithubPullRequests));
expect(prs.repoSlug).toBe('organisation/repo');
});
});
describe('createMiddleware()', () => {
let buildRetriever: BuildRetriever;
let buildVerifier: BuildVerifier;
let buildCreator: BuildCreator;
let agent: supertest.SuperTest<supertest.Test>;
// Helpers
const promisifyRequest = async (req: supertest.Request) => await promisify(req.end.bind(req))();
const verifyRequests = async (reqs: supertest.Request[]) => await Promise.all(reqs.map(promisifyRequest));
beforeEach(() => {
const circleCiApi = new CircleCiApi(defaultConfig.githubOrg, defaultConfig.githubRepo,
defaultConfig.circleCiToken);
const githubApi = new GithubApi(defaultConfig.githubToken);
const prs = new GithubPullRequests(githubApi, defaultConfig.githubOrg, defaultConfig.githubRepo);
const teams = new GithubTeams(githubApi, defaultConfig.githubOrg);
buildRetriever = new BuildRetriever(circleCiApi, defaultConfig.downloadSizeLimit, defaultConfig.downloadsDir);
buildVerifier = new BuildVerifier(prs, teams, defaultConfig.githubTeamSlugs, defaultConfig.trustedPrLabel);
buildCreator = new BuildCreator(defaultConfig.buildsDir);
const middleware = PreviewServerFactory.createMiddleware(buildRetriever, buildVerifier, buildCreator,
defaultConfig);
agent = supertest.agent(middleware);
});
describe('GET /health-check', () => {
it('should respond with 200', async () => {
await verifyRequests([
agent.get('/health-check').expect(200),
agent.get('/health-check/').expect(200),
]);
});
it('should respond with 404 for non-GET requests', async () => {
await verifyRequests([
agent.put('/health-check').expect(404),
agent.post('/health-check').expect(404),
agent.patch('/health-check').expect(404),
agent.delete('/health-check').expect(404),
]);
});
it('should respond with 404 if the path does not match exactly', async () => {
await verifyRequests([
agent.get('/health-check/foo').expect(404),
agent.get('/health-check-foo').expect(404),
agent.get('/health-checknfoo').expect(404),
agent.get('/foo/health-check').expect(404),
agent.get('/foo-health-check').expect(404),
agent.get('/foonhealth-check').expect(404),
]);
});
});
describe('/circle-build', () => {
let getGithubInfoSpy: jasmine.Spy;
let getSignificantFilesChangedSpy: jasmine.Spy;
let downloadBuildArtifactSpy: jasmine.Spy;
let getPrIsTrustedSpy: jasmine.Spy;
let createBuildSpy: jasmine.Spy;
let IS_PUBLIC: boolean;
let BUILD_INFO: GithubInfo;
let AFFECTS_SIGNIFICANT_FILES: boolean;
let BASIC_PAYLOAD: CircleCiWebHookPayload;
const URL = '/circle-build';
const BUILD_NUM = 12345;
const PR = 777;
const SHA = 'COMMIT';
const DOWNLOADED_ARTIFACT_PATH = 'downloads/777-COMMIT-build.zip';
beforeEach(() => {
IS_PUBLIC = true;
BUILD_INFO = {
org: defaultConfig.githubOrg,
pr: PR,
repo: defaultConfig.githubRepo,
sha: SHA,
success: true,
};
BASIC_PAYLOAD = { payload: { build_num: BUILD_NUM, build_parameters: { CIRCLE_JOB: 'aio_preview' } } };
AFFECTS_SIGNIFICANT_FILES = true;
getGithubInfoSpy = spyOn(buildRetriever, 'getGithubInfo')
.and.callFake(() => Promise.resolve(BUILD_INFO));
getSignificantFilesChangedSpy = spyOn(buildVerifier, 'getSignificantFilesChanged')
.and.callFake(() => Promise.resolve(AFFECTS_SIGNIFICANT_FILES));
downloadBuildArtifactSpy = spyOn(buildRetriever, 'downloadBuildArtifact')
.and.callFake(() => Promise.resolve(DOWNLOADED_ARTIFACT_PATH));
getPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted')
.and.callFake(() => Promise.resolve(IS_PUBLIC));
createBuildSpy = spyOn(buildCreator, 'create');
});
it('should respond with 400 if the request body is not in the correct format', async () => {
await Promise.all([
agent.post(URL).expect(400),
agent.post(URL).send().expect(400),
agent.post(URL).send({}).expect(400),
agent.post(URL).send({ payload: {} }).expect(400),
agent.post(URL).send({ payload: { build_num: -1 } }).expect(400),
agent.post(URL).send({ payload: { build_num: 4000 } }).expect(400),
agent.post(URL).send({ payload: { build_num: 4000, build_parameters: { } } }).expect(400),
agent.post(URL).send({ payload: { build_num: 4000, build_parameters: { CIRCLE_JOB: '' } } }).expect(400),
]);
});
it('should create a preview if everything is good and the build succeeded', async () => {
await agent.post(URL).send(BASIC_PAYLOAD).expect(201);
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(getSignificantFilesChangedSpy).toHaveBeenCalledWith(PR, jasmine.any(RegExp));
expect(downloadBuildArtifactSpy).toHaveBeenCalledWith(BUILD_NUM, PR, SHA, defaultConfig.buildArtifactPath);
expect(getPrIsTrustedSpy).toHaveBeenCalledWith(PR);
expect(createBuildSpy).toHaveBeenCalledWith(PR, SHA, DOWNLOADED_ARTIFACT_PATH, IS_PUBLIC);
});
it('should respond with 204 if the reported build is not the "AIO preview" job', async () => {
BASIC_PAYLOAD.payload.build_parameters.CIRCLE_JOB = 'lint';
await agent.post(URL).send(BASIC_PAYLOAD).expect(204);
expect(getGithubInfoSpy).not.toHaveBeenCalled();
expect(getSignificantFilesChangedSpy).not.toHaveBeenCalled();
expect(console.log).toHaveBeenCalledWith(jasmine.any(String), 'PreviewServer: ',
'Build:12345, Job:lint -', 'Skipping preview processing because this is not the "aio_preview" job.');
expect(downloadBuildArtifactSpy).not.toHaveBeenCalled();
expect(getPrIsTrustedSpy).not.toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
it('should respond with 204 if the build did not affect any significant files', async () => {
AFFECTS_SIGNIFICANT_FILES = false;
await agent.post(URL).send(BASIC_PAYLOAD).expect(204);
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(getSignificantFilesChangedSpy).toHaveBeenCalledWith(PR, jasmine.any(RegExp));
expect(console.log).toHaveBeenCalledWith(jasmine.any(String), 'PreviewServer: ',
'PR:777, Build:12345 - Skipping preview processing because this PR did not touch any significant files.');
expect(downloadBuildArtifactSpy).not.toHaveBeenCalled();
expect(getPrIsTrustedSpy).not.toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
it('should respond with 201 if the build is trusted', async () => {
IS_PUBLIC = true;
await agent.post(URL).send(BASIC_PAYLOAD).expect(201);
});
it('should respond with 202 if the build is not trusted', async () => {
IS_PUBLIC = false;
await agent.post(URL).send(BASIC_PAYLOAD).expect(202);
});
it('should not create a preview if the build was not successful', async () => {
BUILD_INFO.success = false;
await agent.post(URL).send(BASIC_PAYLOAD).expect(204);
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(downloadBuildArtifactSpy).not.toHaveBeenCalled();
expect(getPrIsTrustedSpy).not.toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
it('should fail if the CircleCI request fails', async () => {
// Note it is important to put the `reject` into `and.callFake`;
// If you just `and.returnValue` the rejected promise
// then you get an "unhandled rejection" message in the console.
getGithubInfoSpy.and.callFake(() => Promise.reject('Test Error'));
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(downloadBuildArtifactSpy).not.toHaveBeenCalled();
expect(getPrIsTrustedSpy).not.toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
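The comment in the test above is worth spelling out: with `returnValue` the rejected promise is created eagerly when the spy is configured, so it can be reported as an unhandled rejection before any caller attaches a `.catch()`; with `callFake` the rejection is only created when the spy is invoked and is handled immediately by the code under test. A small stand-alone illustration (hypothetical spy, not part of this suite):

```ts
const spy = jasmine.createSpy('getGithubInfo');

// Eager rejection: may trigger an "unhandled rejection" warning in the console.
spy.and.returnValue(Promise.reject('Test Error'));

// Lazy rejection: created on invocation and handled by the caller right away.
spy.and.callFake(() => Promise.reject('Test Error'));
```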
it('should fail if the Github organisation of the build does not match the configured organisation', async () => {
BUILD_INFO.org = 'bad';
await agent.post(URL).send(BASIC_PAYLOAD)
.expect(500, `Invalid webhook: expected "githubOrg" property to equal "organisation" but got "bad".`);
});
it('should fail if the Github repo of the build does not match the configured repo', async () => {
BUILD_INFO.repo = 'bad';
await agent.post(URL).send(BASIC_PAYLOAD)
.expect(500, `Invalid webhook: expected "githubRepo" property to equal "repo" but got "bad".`);
});
it('should fail if the artifact fetch request fails', async () => {
downloadBuildArtifactSpy.and.callFake(() => Promise.reject('Test Error'));
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
expect(getPrIsTrustedSpy).not.toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
it('should fail if verifying the PR fails', async () => {
getPrIsTrustedSpy.and.callFake(() => Promise.reject('Test Error'));
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
expect(getPrIsTrustedSpy).toHaveBeenCalled();
expect(createBuildSpy).not.toHaveBeenCalled();
});
it('should fail if creating the preview build fails', async () => {
createBuildSpy.and.callFake(() => Promise.reject('Test Error'));
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
expect(getPrIsTrustedSpy).toHaveBeenCalled();
expect(createBuildSpy).toHaveBeenCalled();
});
});
describe('POST /pr-updated', () => {
const pr = '9';
const url = '/pr-updated';
let bvGetPrIsTrustedSpy: jasmine.Spy;
let bcUpdatePrVisibilitySpy: jasmine.Spy;
// Helpers
const createRequest = (num: number, action?: string) =>
agent.post(url).send({number: num, action});
beforeEach(() => {
bvGetPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted');
bcUpdatePrVisibilitySpy = spyOn(buildCreator, 'updatePrVisibility');
});
it('should respond with 404 for non-POST requests', async () => {
await verifyRequests([
agent.get(url).expect(404),
agent.put(url).expect(404),
agent.patch(url).expect(404),
agent.delete(url).expect(404),
]);
});
it('should respond with 400 for requests without a payload', async () => {
const responseBody = `Missing or empty 'number' field in request: POST ${url} {}`;
const request1 = agent.post(url);
const request2 = agent.post(url).send();
await verifyRequests([
request1.expect(400, responseBody),
request2.expect(400, responseBody),
]);
});
it('should respond with 400 for requests without a \'number\' field', async () => {
const responseBodyPrefix = `Missing or empty 'number' field in request: POST ${url}`;
const request1 = agent.post(url).send({});
const request2 = agent.post(url).send({number: null});
await verifyRequests([
request1.expect(400, `${responseBodyPrefix} {}`),
request2.expect(400, `${responseBodyPrefix} {"number":null}`),
]);
});
it('should call \'BuildVerifier#gtPrIsTrusted()\' with the correct arguments', async () => {
await promisifyRequest(createRequest(+pr));
expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9);
});
it('should propagate errors from BuildVerifier', async () => {
bvGetPrIsTrustedSpy.and.callFake(() => Promise.reject('Test'));
const req = createRequest(+pr).expect(500, 'Test');
await promisifyRequest(req);
expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9);
expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
});
it('should call \'BuildCreator#updatePrVisibility()\' with the correct arguments', async () => {
bvGetPrIsTrustedSpy.and.callFake((pr2: number) => Promise.resolve(pr2 === 42));
await promisifyRequest(createRequest(24));
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(24, false);
await promisifyRequest(createRequest(42));
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(42, true);
});
it('should propagate errors from BuildCreator', async () => {
bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject('Test'));
const req = createRequest(+pr).expect(500, 'Test');
await verifyRequests([req]);
});
describe('on success', () => {
it('should respond with 200 (action: undefined)', async () => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num).expect(200, http.STATUS_CODES[200]));
await verifyRequests(reqs);
});
it('should respond with 200 (action: labeled)', async () => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num, 'labeled').expect(200, http.STATUS_CODES[200]));
await verifyRequests(reqs);
});
it('should respond with 200 (action: unlabeled)', async () => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num, 'unlabeled').expect(200, http.STATUS_CODES[200]));
await verifyRequests(reqs);
});
it('should respond with 200 (and do nothing) if \'action\' implies no visibility change', async () => {
const promises = ['foo', 'notlabeled'].
map(action => createRequest(+pr, action).expect(200, http.STATUS_CODES[200])).
map(promisifyRequest);
await Promise.all(promises);
expect(bvGetPrIsTrustedSpy).not.toHaveBeenCalled();
expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
});
});
});
describe('ALL *', () => {
it('should respond with 404', async () => {
const responseFor = (method: string) => `Unknown resource in request: ${method.toUpperCase()} /some/url`;
await verifyRequests([
agent.get('/some/url').expect(404, responseFor('get')),
agent.put('/some/url').expect(404, responseFor('put')),
agent.post('/some/url').expect(404, responseFor('post')),
agent.patch('/some/url').expect(404, responseFor('patch')),
agent.delete('/some/url').expect(404, responseFor('delete')),
]);
});
});
});
});

View File

@ -0,0 +1,49 @@
import * as express from 'express';
import {PreviewServerError} from '../../lib/preview-server/preview-error';
import {respondWithError, throwRequestError} from '../../lib/preview-server/utils';
describe('preview-server/utils', () => {
describe('respondWithError', () => {
let endSpy: jasmine.Spy;
let statusSpy: jasmine.Spy;
let response: express.Response;
beforeEach(() => {
endSpy = jasmine.createSpy('end');
statusSpy = jasmine.createSpy('status').and.callFake(() => response);
response = {status: statusSpy, end: endSpy} as any;
});
it('should set the status on the response', () => {
respondWithError(response, new PreviewServerError(505, 'TEST MESSAGE'));
expect(statusSpy).toHaveBeenCalledWith(505);
expect(endSpy).toHaveBeenCalledWith('TEST MESSAGE', jasmine.any(Function));
});
it('should convert non-PreviewServerError errors to 500 PreviewServerErrors', () => {
respondWithError(response, new Error('OTHER MESSAGE'));
expect(statusSpy).toHaveBeenCalledWith(500);
expect(endSpy).toHaveBeenCalledWith('OTHER MESSAGE', jasmine.any(Function));
});
});
describe('throwRequestError', () => {
it('should throw a suitable error', () => {
let caught = false;
try {
const request = {
body: 'The request body',
method: 'POST',
originalUrl: 'some.domain.com/path',
} as express.Request;
throwRequestError(505, 'ERROR MESSAGE', request);
} catch (error) {
caught = true;
expect(error).toEqual(jasmine.any(PreviewServerError));
expect(error.status).toEqual(505);
expect(error.message).toEqual(`ERROR MESSAGE in request: POST some.domain.com/path "The request body"`);
}
expect(caught).toEqual(true);
});
});
});
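The behaviour pinned down above can be summarised in a short sketch, assuming the callback passed to `end()` is just a post-send hook; the real `lib/preview-server/utils` helpers may differ in details such as logging:

```ts
import * as express from 'express';
import {PreviewServerError} from '../../lib/preview-server/preview-error';

export function respondWithError(res: express.Response, err: any): void {
  // Non-PreviewServerError values are normalised to a 500 with the same message.
  const error = err instanceof PreviewServerError ?
      err : new PreviewServerError(500, err instanceof Error ? err.message : String(err));
  res.status(error.status);
  res.end(error.message, () => undefined);
}

export function throwRequestError(status: number, action: string, req: express.Request): never {
  const message = `${action} in request: ${req.method} ${req.originalUrl}` +
      (req.body === undefined ? '' : ` ${JSON.stringify(req.body)}`);
  throw new PreviewServerError(status, message);
}
```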

View File

@ -1,303 +0,0 @@
// Imports
import * as jwt from 'jsonwebtoken';
import {GithubPullRequests, PullRequest} from '../../lib/common/github-pull-requests';
import {GithubTeams} from '../../lib/common/github-teams';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../../lib/upload-server/build-verifier';
import {expectToBeUploadError} from './helpers';
// Tests
describe('BuildVerifier', () => {
const defaultConfig = {
allowedTeamSlugs: ['team1', 'team2'],
githubToken: 'githubToken',
organization: 'organization',
repoSlug: 'repo/slug',
secret: 'secret',
trustedPrLabel: 'trusted: pr-label',
};
let bv: BuildVerifier;
// Helpers
const createBuildVerifier = (partialConfig: Partial<typeof defaultConfig> = {}) => {
const cfg = {...defaultConfig, ...partialConfig} as typeof defaultConfig;
return new BuildVerifier(cfg.secret, cfg.githubToken, cfg.repoSlug, cfg.organization,
cfg.allowedTeamSlugs, cfg.trustedPrLabel);
};
beforeEach(() => bv = createBuildVerifier());
describe('constructor()', () => {
['secret', 'githubToken', 'repoSlug', 'organization', 'allowedTeamSlugs', 'trustedPrLabel'].
forEach(param => {
it(`should throw if '${param}' is missing or empty`, () => {
expect(() => createBuildVerifier({[param]: ''})).
toThrowError(`Missing or empty required parameter '${param}'!`);
});
});
it('should throw if \'allowedTeamSlugs\' is an empty array', () => {
expect(() => createBuildVerifier({allowedTeamSlugs: []})).
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
});
});
describe('getPrIsTrusted()', () => {
const pr = 9;
let mockPrInfo: PullRequest;
let prsFetchSpy: jasmine.Spy;
let teamsIsMemberBySlugSpy: jasmine.Spy;
beforeEach(() => {
mockPrInfo = {
labels: [
{name: 'foo'},
{name: 'bar'},
],
number: 9,
user: {login: 'username'},
};
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
and.returnValue(Promise.resolve(mockPrInfo));
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
and.returnValue(Promise.resolve(true));
});
it('should return a promise', done => {
const promise = bv.getPrIsTrusted(pr);
promise.then(done); // Do not complete the test (and release the spies) synchronously
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
expect(promise).toEqual(jasmine.any(Promise));
});
it('should fetch the corresponding PR', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
done();
});
});
it('should fail if fetching the PR errors', done => {
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
bv.getPrIsTrusted(pr).catch(err => {
expect(err).toBe('Test');
done();
});
});
describe('when the PR has the "trusted PR" label', () => {
beforeEach(() => mockPrInfo.labels.push({name: 'trusted: pr-label'}));
it('should resolve to true', done => {
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(true);
done();
});
});
it('should not try to verify the author\'s membership status', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(teamsIsMemberBySlugSpy).not.toHaveBeenCalled();
done();
});
});
});
describe('when the PR does not have the "trusted PR" label', () => {
it('should verify the PR author\'s membership in the specified teams', done => {
bv.getPrIsTrusted(pr).then(() => {
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
done();
});
});
it('should fail if verifying membership errors', done => {
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
bv.getPrIsTrusted(pr).catch(err => {
expect(err).toBe('Test');
done();
});
});
it('should resolve to true if the PR\'s author is a member', done => {
teamsIsMemberBySlugSpy.and.returnValue(Promise.resolve(true));
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(true);
done();
});
});
it('should resolve to false if the PR\'s author is not a member', done => {
teamsIsMemberBySlugSpy.and.returnValue(Promise.resolve(false));
bv.getPrIsTrusted(pr).then(isTrusted => {
expect(isTrusted).toBe(false);
done();
});
});
});
});
describe('verify()', () => {
const pr = 9;
const defaultJwt = {
'exp': Math.floor(Date.now() / 1000) + 30,
'iat': Math.floor(Date.now() / 1000) - 30,
'iss': 'Travis CI, GmbH',
'pull-request': pr,
'slug': defaultConfig.repoSlug,
};
let bvGetPrIsTrusted: jasmine.Spy;
// Heleprs
const createAuthHeader = (partialJwt: Partial<typeof defaultJwt> = {}, secret: string = defaultConfig.secret) =>
`Token ${jwt.sign({...defaultJwt, ...partialJwt}, secret)}`;
beforeEach(() => {
bvGetPrIsTrusted = spyOn(bv, 'getPrIsTrusted').and.returnValue(Promise.resolve(true));
});
it('should return a promise', done => {
const promise = bv.verify(pr, createAuthHeader());
promise.then(done); // Do not complete the test (and release the spies) synchronously
// to avoid running the actual `bvGetPrIsTrusted()`.
expect(promise).toEqual(jasmine.any(Promise));
});
it('should fail if the authorization header is invalid', done => {
bv.verify(pr, 'foo').catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: jwt malformed';
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should fail if the secret is invalid', done => {
bv.verify(pr, createAuthHeader({}, 'foo')).catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: invalid signature';
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should fail if the issuer is invalid', done => {
bv.verify(pr, createAuthHeader({iss: 'not valid'})).catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: ' +
`jwt issuer invalid. expected: ${defaultJwt.iss}`;
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should fail if the token has expired', done => {
bv.verify(pr, createAuthHeader({exp: 0})).catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: jwt expired';
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should fail if the repo slug does not match', done => {
bv.verify(pr, createAuthHeader({slug: 'foo/bar'})).catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: ' +
`jwt slug invalid. expected: ${defaultConfig.repoSlug}`;
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should fail if the PR does not match', done => {
bv.verify(pr, createAuthHeader({'pull-request': 1337})).catch(err => {
const errorMessage = 'Error while verifying upload for PR 9: ' +
`jwt pull-request invalid. expected: ${pr}`;
expectToBeUploadError(err, 403, errorMessage);
done();
});
});
it('should not fail if the token is valid', done => {
bv.verify(pr, createAuthHeader()).then(done);
});
it('should not fail even if the token has been issued in the future', done => {
const in30s = Math.floor(Date.now() / 1000) + 30;
bv.verify(pr, createAuthHeader({iat: in30s})).then(done);
});
it('should call \'getPrIsTrusted()\' if the token is valid', done => {
bv.verify(pr, createAuthHeader()).then(() => {
expect(bvGetPrIsTrusted).toHaveBeenCalledWith(pr);
done();
});
});
it('should fail if \'getPrIsTrusted()\' rejects', done => {
bvGetPrIsTrusted.and.callFake(() => Promise.reject('Test'));
bv.verify(pr, createAuthHeader()).catch(err => {
expectToBeUploadError(err, 403, `Error while verifying upload for PR ${pr}: Test`);
done();
});
});
it('should resolve to `verifiedNotTrusted` if \'getPrIsTrusted()\' returns false', done => {
bvGetPrIsTrusted.and.returnValue(Promise.resolve(false));
bv.verify(pr, createAuthHeader()).then(value => {
expect(value).toBe(BUILD_VERIFICATION_STATUS.verifiedNotTrusted);
done();
});
});
it('should resolve to `verifiedAndTrusted` if \'getPrIsTrusted()\' returns true', done => {
bv.verify(pr, createAuthHeader()).then(value => {
expect(value).toBe(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);
done();
});
});
});
});
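For reference, a minimal sketch of the JWT handling these `verify()` specs exercise, assuming the `jsonwebtoken` package already used by `createAuthHeader()` above. The function name, parameter layout and the relative import path are illustrative assumptions, not the project's actual API, and the final trust resolution via `getPrIsTrusted()` is omitted.

import * as jwt from 'jsonwebtoken';
import {UploadError} from '../../lib/upload-server/upload-error';  // path as used by the sibling specs

interface PreviewJwt { exp: number; iat: number; iss: string; 'pull-request': number; slug: string; }

// Sketch only: the checks mirror the failure cases asserted above.
export function verifySketch(pr: number, authHeader: string, secret: string, repoSlug: string): Promise<void> {
  return Promise.resolve().
    then(() => {
      // `createAuthHeader()` produces `Token <signed-jwt>`.
      const token = authHeader.replace(/^Token +/, '');
      // `jsonwebtoken` itself raises 'jwt malformed', 'invalid signature', 'jwt expired' and
      // 'jwt issuer invalid. expected: ...'; a future `iat` is tolerated by default, as the specs expect.
      const claims = jwt.verify(token, secret, {issuer: 'Travis CI, GmbH'}) as any as PreviewJwt;
      if (claims.slug !== repoSlug) {
        throw new Error(`jwt slug invalid. expected: ${repoSlug}`);
      }
      if (claims['pull-request'] !== pr) {
        throw new Error(`jwt pull-request invalid. expected: ${pr}`);
      }
    }).
    catch(err => {
      // Every failure surfaces as a 403 with the message prefix asserted via `expectToBeUploadError()`.
      throw new UploadError(403, `Error while verifying upload for PR ${pr}: ${err.message || err}`);
    });
}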

View File

@ -1,11 +0,0 @@
import {UploadError} from '../../lib/upload-server/upload-error';
export const expectToBeUploadError = (actual: UploadError, status?: number, message?: string) => {
expect(actual).toEqual(jasmine.any(UploadError));
if (status != null) {
expect(actual.status).toBe(status);
}
if (message != null) {
expect(actual.message).toBe(message);
}
};

View File

@ -1,39 +0,0 @@
// Imports
import {UploadError} from '../../lib/upload-server/upload-error';
// Tests
describe('UploadError', () => {
let err: UploadError;
beforeEach(() => err = new UploadError(999, 'message'));
it('should extend Error', () => {
expect(err).toEqual(jasmine.any(UploadError));
expect(err).toEqual(jasmine.any(Error));
expect(Object.getPrototypeOf(err)).toBe(UploadError.prototype);
});
it('should have a \'status\' property', () => {
expect(err.status).toBe(999);
});
it('should have a \'message\' property', () => {
expect(err.message).toBe('message');
});
it('should have a 500 \'status\' by default', () => {
expect(new UploadError().status).toBe(500);
});
it('should have an empty \'message\' by default', () => {
expect(new UploadError().message).toBe('');
expect(new UploadError(999).message).toBe('');
});
});
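A class along the following lines would satisfy every expectation in this spec (defaults and prototype restoration); the real implementation lives in `lib/upload-server/upload-error.ts` and may differ in detail.

export class UploadErrorSketch extends Error {
  constructor(public status: number = 500, message: string = '') {
    super(message);
    // When compiling to ES5, `extends Error` breaks the prototype chain; restoring it keeps
    // `Object.getPrototypeOf(err)` equal to the subclass prototype, as the spec requires.
    Object.setPrototypeOf(this, UploadErrorSketch.prototype);
  }
}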

View File

@ -1,603 +0,0 @@
// Imports
import * as express from 'express';
import * as http from 'http';
import * as supertest from 'supertest';
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
import {BuildCreator} from '../../lib/upload-server/build-creator';
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../../lib/upload-server/build-verifier';
import {uploadServerFactory as usf} from '../../lib/upload-server/upload-server-factory';
// Tests
describe('uploadServerFactory', () => {
const defaultConfig = {
buildsDir: 'builds/dir',
domainName: 'domain.name',
githubOrganization: 'organization',
githubTeamSlugs: ['team1', 'team2'],
githubToken: '12345',
repoSlug: 'repo/slug',
secret: 'secret',
trustedPrLabel: 'trusted: pr-label',
};
// Helpers
const createUploadServer = (partialConfig: Partial<typeof defaultConfig> = {}) =>
usf.create({...defaultConfig, ...partialConfig} as typeof defaultConfig);
describe('create()', () => {
let usfCreateMiddlewareSpy: jasmine.Spy;
beforeEach(() => {
usfCreateMiddlewareSpy = spyOn(usf as any, 'createMiddleware').and.callThrough();
});
it('should throw if \'buildsDir\' is missing or empty', () => {
expect(() => createUploadServer({buildsDir: ''})).
toThrowError('Missing or empty required parameter \'buildsDir\'!');
});
it('should throw if \'domainName\' is missing or empty', () => {
expect(() => createUploadServer({domainName: ''})).
toThrowError('Missing or empty required parameter \'domainName\'!');
});
it('should throw if \'githubToken\' is missing or empty', () => {
expect(() => createUploadServer({githubToken: ''})).
toThrowError('Missing or empty required parameter \'githubToken\'!');
});
it('should throw if \'githubOrganization\' is missing or empty', () => {
expect(() => createUploadServer({githubOrganization: ''})).
toThrowError('Missing or empty required parameter \'organization\'!');
});
it('should throw if \'githubTeamSlugs\' is missing or empty', () => {
expect(() => createUploadServer({githubTeamSlugs: []})).
toThrowError('Missing or empty required parameter \'allowedTeamSlugs\'!');
});
it('should throw if \'repoSlug\' is missing or empty', () => {
expect(() => createUploadServer({repoSlug: ''})).
toThrowError('Missing or empty required parameter \'repoSlug\'!');
});
it('should throw if \'secret\' is missing or empty', () => {
expect(() => createUploadServer({secret: ''})).
toThrowError('Missing or empty required parameter \'secret\'!');
});
it('should throw if \'trustedPrLabel\' is missing or empty', () => {
expect(() => createUploadServer({trustedPrLabel: ''})).
toThrowError('Missing or empty required parameter \'trustedPrLabel\'!');
});
it('should return an http.Server', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
const server = createUploadServer();
expect(server).toBe(httpCreateServerSpy.calls.mostRecent().returnValue);
});
it('should create and use an appropriate BuildCreator', () => {
const usfCreateBuildCreatorSpy = spyOn(usf as any, 'createBuildCreator').and.callThrough();
createUploadServer();
const buildCreator: BuildCreator = usfCreateBuildCreatorSpy.calls.mostRecent().returnValue;
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(jasmine.any(BuildVerifier), buildCreator);
expect(usfCreateBuildCreatorSpy).toHaveBeenCalledWith('builds/dir', '12345', 'repo/slug', 'domain.name');
});
it('should create and use an appropriate middleware', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
createUploadServer();
const middleware: express.Express = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
const buildVerifier = jasmine.any(BuildVerifier);
const buildCreator = jasmine.any(BuildCreator);
expect(httpCreateServerSpy).toHaveBeenCalledWith(middleware);
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(buildVerifier, buildCreator);
});
it('should log the server address info on \'listening\'', () => {
const consoleInfoSpy = spyOn(console, 'info');
const server = createUploadServer();
server.address = () => ({address: 'foo', family: '', port: 1337});
expect(consoleInfoSpy).not.toHaveBeenCalled();
server.emit('listening');
expect(consoleInfoSpy).toHaveBeenCalledWith('Up and running (and listening on foo:1337)...');
});
});
// Protected methods
describe('createBuildCreator()', () => {
let buildCreator: BuildCreator;
beforeEach(() => {
buildCreator = (usf as any).createBuildCreator(
defaultConfig.buildsDir,
defaultConfig.githubToken,
defaultConfig.repoSlug,
defaultConfig.domainName,
);
});
it('should pass the \'buildsDir\' to the BuildCreator', () => {
expect((buildCreator as any).buildsDir).toBe('builds/dir');
});
describe('on \'build.created\'', () => {
let prsAddCommentSpy: jasmine.Spy;
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
it('should post a comment on GitHub for public previews', () => {
const commentBody = 'You can preview 1234567890 at https://pr42-1234567890.domain.name/.';
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
});
it('should not post a comment on GitHub for non-public previews', () => {
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: false});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
});
describe('on \'pr.changedVisibility\'', () => {
let prsAddCommentSpy: jasmine.Spy;
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
it('should post a comment on GitHub (for all SHAs) for PRs made public', () => {
const commentBody = 'You can preview 12345 at https://pr42-12345.domain.name/.\n' +
'You can preview 67890 at https://pr42-67890.domain.name/.';
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
});
it('should not post a comment on GitHub if no SHAs were affected', () => {
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: [], isPublic: true});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
it('should not post a comment on GitHub for PRs made non-public', () => {
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: false});
expect(prsAddCommentSpy).not.toHaveBeenCalled();
});
});
it('should pass the correct \'githubToken\' and \'repoSlug\' to GithubPullRequests', () => {
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
const allCalls = prsAddCommentSpy.calls.all();
const prs = allCalls[0].object;
expect(prsAddCommentSpy).toHaveBeenCalledTimes(2);
expect(prs).toBe(allCalls[1].object);
expect(prs).toEqual(jasmine.any(GithubPullRequests));
expect(prs.repoSlug).toBe('repo/slug');
expect(prs.requestHeaders.Authorization).toContain('12345');
});
});
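As a hedged illustration of the wiring these `createBuildCreator()` specs pin down (not the factory's actual code): the constructor argument order below is an assumption, while the comment format and the event-handling rules are taken from the expectations above.

import {GithubPullRequests} from '../../lib/common/github-pull-requests';
import {BuildCreator} from '../../lib/upload-server/build-creator';
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';

export function createBuildCreatorSketch(buildsDir: string, githubToken: string,
                                         repoSlug: string, domainName: string): BuildCreator {
  const buildCreator = new BuildCreator(buildsDir);
  const prs = new GithubPullRequests(githubToken, repoSlug);
  const previewLine = (pr: number, sha: string) =>
    `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`;

  buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: {pr: number, sha: string, isPublic: boolean}) => {
    // Only public previews get a GitHub comment.
    if (isPublic) {
      prs.addComment(pr, previewLine(pr, sha));
    }
  });

  buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: {pr: number, shas: string[], isPublic: boolean}) => {
    // One line per SHA when a PR becomes public; nothing for hidden PRs or empty SHA lists.
    if (isPublic && shas.length) {
      prs.addComment(pr, shas.map(sha => previewLine(pr, sha)).join('\n'));
    }
  });

  return buildCreator;
}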
describe('createMiddleware()', () => {
let buildVerifier: BuildVerifier;
let buildCreator: BuildCreator;
let agent: supertest.SuperTest<supertest.Test>;
// Helpers
const promisifyRequest = (req: supertest.Request) =>
new Promise((resolve, reject) => req.end(err => err ? reject(err) : resolve()));
const verifyRequests = (reqs: supertest.Request[], done: jasmine.DoneFn) =>
Promise.all(reqs.map(promisifyRequest)).then(done, done.fail);
beforeEach(() => {
buildVerifier = new BuildVerifier(
defaultConfig.secret,
defaultConfig.githubToken,
defaultConfig.repoSlug,
defaultConfig.githubOrganization,
defaultConfig.githubTeamSlugs,
defaultConfig.trustedPrLabel,
);
buildCreator = new BuildCreator(defaultConfig.buildsDir);
agent = supertest.agent((usf as any).createMiddleware(buildVerifier, buildCreator));
spyOn(console, 'error');
});
describe('GET /create-build/<pr>/<sha>', () => {
const pr = '9';
const sha = '9'.repeat(40);
let buildVerifierVerifySpy: jasmine.Spy;
let buildCreatorCreateSpy: jasmine.Spy;
beforeEach(() => {
const verStatus = BUILD_VERIFICATION_STATUS.verifiedAndTrusted;
buildVerifierVerifySpy = spyOn(buildVerifier, 'verify').and.returnValue(Promise.resolve(verStatus));
buildCreatorCreateSpy = spyOn(buildCreator, 'create').and.returnValue(Promise.resolve());
});
it('should respond with 404 for non-GET requests', done => {
verifyRequests([
agent.put(`/create-build/${pr}/${sha}`).expect(404),
agent.post(`/create-build/${pr}/${sha}`).expect(404),
agent.patch(`/create-build/${pr}/${sha}`).expect(404),
agent.delete(`/create-build/${pr}/${sha}`).expect(404),
], done);
});
it('should respond with 401 for requests without an \'AUTHORIZATION\' header', done => {
const url = `/create-build/${pr}/${sha}`;
const responseBody = `Missing or empty 'AUTHORIZATION' header in request: GET ${url}`;
verifyRequests([
agent.get(url).expect(401, responseBody),
agent.get(url).set('AUTHORIZATION', '').expect(401, responseBody),
], done);
});
it('should respond with 400 for requests without an \'X-FILE\' header', done => {
const url = `/create-build/${pr}/${sha}`;
const responseBody = `Missing or empty 'X-FILE' header in request: GET ${url}`;
const request1 = agent.get(url).set('AUTHORIZATION', 'foo');
const request2 = agent.get(url).set('AUTHORIZATION', 'foo').set('X-FILE', '');
verifyRequests([
request1.expect(400, responseBody),
request2.expect(400, responseBody),
], done);
});
it('should respond with 404 for unknown paths', done => {
verifyRequests([
agent.get(`/foo/create-build/${pr}/${sha}`).expect(404),
agent.get(`/foo-create-build/${pr}/${sha}`).expect(404),
agent.get(`/fooncreate-build/${pr}/${sha}`).expect(404),
agent.get(`/create-build/foo/${pr}/${sha}`).expect(404),
agent.get(`/create-build-foo/${pr}/${sha}`).expect(404),
agent.get(`/create-buildnfoo/${pr}/${sha}`).expect(404),
agent.get(`/create-build/pr${pr}/${sha}`).expect(404),
agent.get(`/create-build/${pr}/${sha}42`).expect(404),
], done);
});
it('should call \'BuildVerifier#verify()\' with the correct arguments', done => {
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('AUTHORIZATION', 'foo').
set('X-FILE', 'bar');
promisifyRequest(req).
then(() => expect(buildVerifierVerifySpy).toHaveBeenCalledWith(9, 'foo')).
then(done, done.fail);
});
it('should propagate errors from BuildVerifier', done => {
buildVerifierVerifySpy.and.callFake(() => Promise.reject('Test'));
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('AUTHORIZATION', 'foo').
set('X-FILE', 'bar').
expect(500, 'Test');
promisifyRequest(req).
then(() => {
expect(buildVerifierVerifySpy).toHaveBeenCalledWith(9, 'foo');
expect(buildCreatorCreateSpy).not.toHaveBeenCalled();
}).
then(done, done.fail);
});
it('should call \'BuildCreator#create()\' with the correct arguments', done => {
buildVerifierVerifySpy.and.returnValues(
Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted),
Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
const req1 = agent.get(`/create-build/${pr}/${sha}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
const req2 = agent.get(`/create-build/${pr}/${sha}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
Promise.all([
promisifyRequest(req1).then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar', true)),
promisifyRequest(req2).then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar', false)),
]).then(done, done.fail);
});
it('should propagate errors from BuildCreator', done => {
buildCreatorCreateSpy.and.callFake(() => Promise.reject('Test'));
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('AUTHORIZATION', 'foo').
set('X-FILE', 'bar').
expect(500, 'Test');
verifyRequests([req], done);
});
it('should respond with 201 on successful upload (for public builds)', done => {
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('AUTHORIZATION', 'foo').
set('X-FILE', 'bar').
expect(201, http.STATUS_CODES[201]);
verifyRequests([req], done);
});
it('should respond with 202 on successful upload (for hidden builds)', done => {
buildVerifierVerifySpy.and.returnValue(Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('AUTHORIZATION', 'foo').
set('X-FILE', 'bar').
expect(202, http.STATUS_CODES[202]);
verifyRequests([req], done);
});
it('should reject PRs with leading zeros', done => {
verifyRequests([agent.get(`/create-build/0${pr}/${sha}`).expect(404)], done);
});
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
const sha40 = '0'.repeat(40);
const sha41 = `0${sha40}`;
const request40 = agent.get(`/create-build/${pr}/${sha40}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
const request41 = agent.get(`/create-build/${pr}/${sha41}`).set('AUTHORIZATION', 'baz').set('X-FILE', 'qux');
Promise.all([
promisifyRequest(request40.expect(201)),
promisifyRequest(request41.expect(404)),
]).then(done, done.fail);
});
});
describe('GET /health-check', () => {
it('should respond with 200', done => {
verifyRequests([
agent.get('/health-check').expect(200),
agent.get('/health-check/').expect(200),
], done);
});
it('should respond with 404 for non-GET requests', done => {
verifyRequests([
agent.put('/health-check').expect(404),
agent.post('/health-check').expect(404),
agent.patch('/health-check').expect(404),
agent.delete('/health-check').expect(404),
], done);
});
it('should respond with 404 if the path does not match exactly', done => {
verifyRequests([
agent.get('/health-check/foo').expect(404),
agent.get('/health-check-foo').expect(404),
agent.get('/health-checknfoo').expect(404),
agent.get('/foo/health-check').expect(404),
agent.get('/foo-health-check').expect(404),
agent.get('/foonhealth-check').expect(404),
], done);
});
});
describe('POST /pr-updated', () => {
const pr = '9';
const url = '/pr-updated';
let bvGetPrIsTrustedSpy: jasmine.Spy;
let bcUpdatePrVisibilitySpy: jasmine.Spy;
// Helpers
const createRequest = (num: number, action?: string) =>
agent.post(url).send({number: num, action});
beforeEach(() => {
bvGetPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted');
bcUpdatePrVisibilitySpy = spyOn(buildCreator, 'updatePrVisibility');
});
it('should respond with 404 for non-POST requests', done => {
verifyRequests([
agent.get(url).expect(404),
agent.put(url).expect(404),
agent.patch(url).expect(404),
agent.delete(url).expect(404),
], done);
});
it('should respond with 400 for requests without a payload', done => {
const responseBody = `Missing or empty 'number' field in request: POST ${url} {}`;
const request1 = agent.post(url);
const request2 = agent.post(url).send();
verifyRequests([
request1.expect(400, responseBody),
request2.expect(400, responseBody),
], done);
});
it('should respond with 400 for requests without a \'number\' field', done => {
const responseBodyPrefix = `Missing or empty 'number' field in request: POST ${url}`;
const request1 = agent.post(url).send({});
const request2 = agent.post(url).send({number: null});
verifyRequests([
request1.expect(400, `${responseBodyPrefix} {}`),
request2.expect(400, `${responseBodyPrefix} {"number":null}`),
], done);
});
it('should call \'BuildVerifier#getPrIsTrusted()\' with the correct arguments', done => {
const req = createRequest(+pr);
promisifyRequest(req).
then(() => expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9)).
then(done, done.fail);
});
it('should propagate errors from BuildVerifier', done => {
bvGetPrIsTrustedSpy.and.callFake(() => Promise.reject('Test'));
const req = createRequest(+pr).expect(500, 'Test');
promisifyRequest(req).
then(() => {
expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9);
expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
}).
then(done, done.fail);
});
it('should call \'BuildCreator#updatePrVisibility()\' with the correct arguments', done => {
bvGetPrIsTrustedSpy.and.callFake((pr2: number) => Promise.resolve(pr2 === 42));
const req1 = createRequest(24);
const req2 = createRequest(42);
Promise.all([
promisifyRequest(req1).then(() => expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith('24', false)),
promisifyRequest(req2).then(() => expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith('42', true)),
]).then(done, done.fail);
});
it('should propagate errors from BuildCreator', done => {
bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject('Test'));
const req = createRequest(+pr).expect(500, 'Test');
verifyRequests([req], done);
});
describe('on success', () => {
it('should respond with 200 (action: undefined)', done => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num).expect(200, http.STATUS_CODES[200]));
verifyRequests(reqs, done);
});
it('should respond with 200 (action: labeled)', done => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num, 'labeled').expect(200, http.STATUS_CODES[200]));
verifyRequests(reqs, done);
});
it('should respond with 200 (action: unlabeled)', done => {
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
const reqs = [4, 2].map(num => createRequest(num, 'unlabeled').expect(200, http.STATUS_CODES[200]));
verifyRequests(reqs, done);
});
it('should respond with 200 (and do nothing) if \'action\' implies no visibility change', done => {
const promises = ['foo', 'notlabeled'].
map(action => createRequest(+pr, action).expect(200, http.STATUS_CODES[200])).
map(promisifyRequest);
Promise.all(promises).
then(() => {
expect(bvGetPrIsTrustedSpy).not.toHaveBeenCalled();
expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
}).
then(done, done.fail);
});
});
});
describe('ALL *', () => {
it('should respond with 404', done => {
const responseFor = (method: string) => `Unknown resource in request: ${method.toUpperCase()} /some/url`;
verifyRequests([
agent.get('/some/url').expect(404, responseFor('get')),
agent.put('/some/url').expect(404, responseFor('put')),
agent.post('/some/url').expect(404, responseFor('post')),
agent.patch('/some/url').expect(404, responseFor('patch')),
agent.delete('/some/url').expect(404, responseFor('delete')),
], done);
});
});
});
});
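As a rough illustration of the routing rules these middleware specs pin down, here is a hedged sketch (not the factory's real `createMiddleware()`) covering only the `/health-check` route and the catch-all 404; the `create-build` and `pr-updated` handlers are omitted.

import * as express from 'express';
import * as http from 'http';

const app = express();

// Both `/health-check` and `/health-check/` match; `/health-check/foo`, `/health-check-foo`, etc.
// fall through to the catch-all below, as do non-GET methods on this path.
app.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));

// Everything else produces the message checked by the `ALL *` specs.
app.all('*', (req, res) =>
  res.status(404).send(`Unknown resource in request: ${req.method} ${req.url}`));

// The factory then wraps the middleware in a plain Node server (see the `http.createServer` spy above).
export const server = http.createServer(app);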

View File

@ -40,12 +40,6 @@
version "2.6.0" version "2.6.0"
resolved "https://registry.yarnpkg.com/@types/jasmine/-/jasmine-2.6.0.tgz#997b41a27752b4850af2683bc4a8d8222c25bd02" resolved "https://registry.yarnpkg.com/@types/jasmine/-/jasmine-2.6.0.tgz#997b41a27752b4850af2683bc4a8d8222c25bd02"
"@types/jsonwebtoken@^7.2.3":
version "7.2.3"
resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-7.2.3.tgz#483c8f39945e1e6d308dcc51fd4aeca5208d4dca"
dependencies:
"@types/node" "*"
"@types/mime@*": "@types/mime@*":
version "1.3.0" version "1.3.0"
resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.0.tgz#d24ffac7d1006fe68517202fb2aeba3dbe48284b" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.0.tgz#d24ffac7d1006fe68517202fb2aeba3dbe48284b"
@ -54,6 +48,18 @@
version "3.0.1" version "3.0.1"
resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.1.tgz#b683eb60be358304ef146f5775db4c0e3696a550" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.1.tgz#b683eb60be358304ef146f5775db4c0e3696a550"
"@types/nock@^9.1.3":
version "9.1.3"
resolved "https://registry.yarnpkg.com/@types/nock/-/nock-9.1.3.tgz#1d445679375b9e25afd449dc56585f81729454e8"
dependencies:
"@types/node" "*"
"@types/node-fetch@^1.6.8":
version "1.6.8"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-1.6.8.tgz#a59d8c75b300ddc3ca3eef23d449d677f9486c3d"
dependencies:
"@types/node" "*"
"@types/node@*": "@types/node@*":
version "7.0.31" version "7.0.31"
resolved "https://registry.yarnpkg.com/@types/node/-/node-7.0.31.tgz#80ea4d175599b2a00149c29a10a4eb2dff592e86" resolved "https://registry.yarnpkg.com/@types/node/-/node-7.0.31.tgz#80ea4d175599b2a00149c29a10a4eb2dff592e86"
@ -112,6 +118,12 @@ ansi-align@^2.0.0:
dependencies: dependencies:
string-width "^2.0.0" string-width "^2.0.0"
ansi-green@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/ansi-green/-/ansi-green-0.1.1.tgz#8a5d9a979e458d57c40e33580b37390b8e10d0f7"
dependencies:
ansi-wrap "0.1.0"
ansi-regex@^0.2.0, ansi-regex@^0.2.1: ansi-regex@^0.2.0, ansi-regex@^0.2.1:
version "0.2.1" version "0.2.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9"
@ -128,6 +140,10 @@ ansi-styles@^2.2.1:
version "2.2.1" version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
ansi-wrap@0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf"
anymatch@^1.3.0: anymatch@^1.3.0:
version "1.3.0" version "1.3.0"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507"
@ -180,6 +196,10 @@ assert-plus@^0.2.0:
version "0.2.0" version "0.2.0"
resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234"
assertion-error@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b"
async-each@^1.0.0: async-each@^1.0.0:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d"
@ -208,10 +228,6 @@ balanced-match@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
base64url@2.0.0, base64url@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/base64url/-/base64url-2.0.0.tgz#eac16e03ea1438eff9423d69baa36262ed1f70bb"
bcrypt-pbkdf@^1.0.0: bcrypt-pbkdf@^1.0.0:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d"
@ -222,6 +238,13 @@ binary-extensions@^1.0.0:
version "1.8.0" version "1.8.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.8.0.tgz#48ec8d16df4377eae5fa5884682480af4d95c774" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.8.0.tgz#48ec8d16df4377eae5fa5884682480af4d95c774"
bl@^1.0.0:
version "1.2.2"
resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.2.tgz#a160911717103c07410cef63ef51b397c025af9c"
dependencies:
readable-stream "^2.3.5"
safe-buffer "^5.1.1"
block-stream@*: block-stream@*:
version "0.0.9" version "0.0.9"
resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a"
@ -276,9 +299,20 @@ braces@^1.8.2:
preserve "^0.2.0" preserve "^0.2.0"
repeat-element "^1.1.2" repeat-element "^1.1.2"
buffer-equal-constant-time@1.0.1: buffer-alloc-unsafe@^0.1.0:
version "1.0.1" version "0.1.1"
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-0.1.1.tgz#ffe1f67551dd055737de253337bfe853dfab1a6a"
buffer-alloc@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.1.0.tgz#05514d33bf1656d3540c684f65b1202e90eca303"
dependencies:
buffer-alloc-unsafe "^0.1.0"
buffer-fill "^0.1.0"
buffer-fill@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-0.1.1.tgz#76d825c4d6e50e06b7a31eb520c04d08cc235071"
bytes@3.0.0: bytes@3.0.0:
version "3.0.0" version "3.0.0"
@ -296,6 +330,17 @@ caseless@~0.12.0:
version "0.12.0" version "0.12.0"
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
chai@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chai/-/chai-4.1.2.tgz#0f64584ba642f0f2ace2806279f4f06ca23ad73c"
dependencies:
assertion-error "^1.0.1"
check-error "^1.0.1"
deep-eql "^3.0.0"
get-func-name "^2.0.0"
pathval "^1.0.0"
type-detect "^4.0.0"
chalk@0.5.1: chalk@0.5.1:
version "0.5.1" version "0.5.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174" resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174"
@ -316,6 +361,10 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3:
strip-ansi "^3.0.0" strip-ansi "^3.0.0"
supports-color "^2.0.0" supports-color "^2.0.0"
check-error@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82"
chokidar@^1.7.0: chokidar@^1.7.0:
version "1.7.0" version "1.7.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468"
@ -476,6 +525,22 @@ debug@^2.2.0:
dependencies: dependencies:
ms "2.0.0" ms "2.0.0"
debug@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
dependencies:
ms "2.0.0"
deep-eql@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df"
dependencies:
type-detect "^4.0.0"
deep-equal@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5"
deep-extend@~0.4.0: deep-extend@~0.4.0:
version "0.4.2" version "0.4.2"
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f"
@ -488,6 +553,14 @@ delegates@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
delete-empty@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/delete-empty/-/delete-empty-2.0.0.tgz#dcf7c4f93a98445119acd57b137d13e7af78fa39"
dependencies:
log-ok "^0.1.1"
relative "^3.0.2"
rimraf "^2.6.2"
depd@1.1.1, depd@~1.1.1: depd@1.1.1, depd@~1.1.1:
version "1.1.1" version "1.1.1"
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359"
@ -520,13 +593,6 @@ ecc-jsbn@~0.1.1:
dependencies: dependencies:
jsbn "~0.1.0" jsbn "~0.1.0"
ecdsa-sig-formatter@1.0.9:
version "1.0.9"
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.9.tgz#4bc926274ec3b5abb5016e7e1d60921ac262b2a1"
dependencies:
base64url "^2.0.0"
safe-buffer "^5.0.1"
ee-first@1.1.1: ee-first@1.1.1:
version "1.1.1" version "1.1.1"
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
@ -535,6 +601,12 @@ encodeurl@~1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.1.tgz#79e3d58655346909fe6f0f45a5de68103b294d20" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.1.tgz#79e3d58655346909fe6f0f45a5de68103b294d20"
end-of-stream@^1.0.0:
version "1.4.1"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
dependencies:
once "^1.4.0"
es6-promise@^3.3.1: es6-promise@^3.3.1:
version "3.3.1" version "3.3.1"
resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-3.3.1.tgz#a08cdde84ccdbf34d027a1451bc91d4bcd28a613" resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-3.3.1.tgz#a08cdde84ccdbf34d027a1451bc91d4bcd28a613"
@ -713,6 +785,10 @@ from@~0:
version "0.1.7" version "0.1.7"
resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe"
fs-constants@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
fs.realpath@^1.0.0: fs.realpath@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
@ -754,6 +830,10 @@ gauge@~2.7.3:
strip-ansi "^3.0.1" strip-ansi "^3.0.1"
wide-align "^1.1.0" wide-align "^1.1.0"
get-func-name@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41"
get-stream@^3.0.0: get-stream@^3.0.0:
version "3.0.0" version "3.0.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
@ -892,7 +972,7 @@ inflight@^1.0.4:
once "^1.3.0" once "^1.3.0"
wrappy "1" wrappy "1"
inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.1: inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3:
version "2.0.3" version "2.0.3"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
@ -1044,7 +1124,7 @@ json-stable-stringify@^1.0.1:
dependencies: dependencies:
jsonify "~0.0.0" jsonify "~0.0.0"
json-stringify-safe@~5.0.1: json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
version "5.0.1" version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
@ -1052,21 +1132,6 @@ jsonify@~0.0.0:
version "0.0.0" version "0.0.0"
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
jsonwebtoken@^8.0.1:
version "8.0.1"
resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.0.1.tgz#50daef8d0a8c7de2cd06bc1013b75b04ccf3f0cf"
dependencies:
jws "^3.1.4"
lodash.includes "^4.3.0"
lodash.isboolean "^3.0.3"
lodash.isinteger "^4.0.4"
lodash.isnumber "^3.0.3"
lodash.isplainobject "^4.0.6"
lodash.isstring "^4.0.1"
lodash.once "^4.0.0"
ms "^2.0.0"
xtend "^4.0.1"
jsprim@^1.2.2: jsprim@^1.2.2:
version "1.4.0" version "1.4.0"
resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918"
@ -1076,23 +1141,6 @@ jsprim@^1.2.2:
json-schema "0.2.3" json-schema "0.2.3"
verror "1.3.6" verror "1.3.6"
jwa@^1.1.4:
version "1.1.5"
resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.1.5.tgz#a0552ce0220742cd52e153774a32905c30e756e5"
dependencies:
base64url "2.0.0"
buffer-equal-constant-time "1.0.1"
ecdsa-sig-formatter "1.0.9"
safe-buffer "^5.0.1"
jws@^3.1.4:
version "3.1.4"
resolved "https://registry.yarnpkg.com/jws/-/jws-3.1.4.tgz#f9e8b9338e8a847277d6444b1464f61880e050a2"
dependencies:
base64url "^2.0.0"
jwa "^1.1.4"
safe-buffer "^5.0.1"
kind-of@^3.0.2: kind-of@^3.0.2:
version "3.2.2" version "3.2.2"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
@ -1157,10 +1205,6 @@ lodash.defaults@^3.1.2:
lodash.assign "^3.0.0" lodash.assign "^3.0.0"
lodash.restparam "^3.0.0" lodash.restparam "^3.0.0"
lodash.includes@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
lodash.isarguments@^3.0.0: lodash.isarguments@^3.0.0:
version "3.1.0" version "3.1.0"
resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a"
@ -1169,26 +1213,6 @@ lodash.isarray@^3.0.0:
version "3.0.4" version "3.0.4"
resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55"
lodash.isboolean@^3.0.3:
version "3.0.3"
resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
lodash.isinteger@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
lodash.isnumber@^3.0.3:
version "3.0.3"
resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
lodash.isplainobject@^4.0.6:
version "4.0.6"
resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
lodash.isstring@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
lodash.keys@^3.0.0: lodash.keys@^3.0.0:
version "3.1.2" version "3.1.2"
resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a" resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a"
@ -1197,18 +1221,25 @@ lodash.keys@^3.0.0:
lodash.isarguments "^3.0.0" lodash.isarguments "^3.0.0"
lodash.isarray "^3.0.0" lodash.isarray "^3.0.0"
lodash.once@^4.0.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
lodash.restparam@^3.0.0: lodash.restparam@^3.0.0:
version "3.6.1" version "3.6.1"
resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805" resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805"
lodash@^4.17.5:
version "4.17.5"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511"
lodash@^4.5.1: lodash@^4.5.1:
version "4.17.4" version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
log-ok@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/log-ok/-/log-ok-0.1.1.tgz#bea3dd36acd0b8a7240d78736b5b97c65444a334"
dependencies:
ansi-green "^0.1.1"
success-symbol "^0.1.0"
lowercase-keys@^1.0.0: lowercase-keys@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306"
@ -1288,13 +1319,13 @@ minimist@^1.2.0:
version "1.2.0" version "1.2.0"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
"mkdirp@>=0.5 0", mkdirp@^0.5.1: "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1:
version "0.5.1" version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
dependencies: dependencies:
minimist "0.0.8" minimist "0.0.8"
ms@2.0.0, ms@^2.0.0: ms@2.0.0:
version "2.0.0" version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
@ -1306,6 +1337,24 @@ negotiator@0.6.1:
version "0.6.1" version "0.6.1"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
nock@^9.2.5:
version "9.2.5"
resolved "https://registry.yarnpkg.com/nock/-/nock-9.2.5.tgz#c131fc8d3c4723f386be0269739638be84733f2f"
dependencies:
chai "^4.1.2"
debug "^3.1.0"
deep-equal "^1.0.0"
json-stringify-safe "^5.0.1"
lodash "^4.17.5"
mkdirp "^0.5.0"
propagate "^1.0.0"
qs "^6.5.1"
semver "^5.5.0"
node-fetch@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.1.2.tgz#ab884e8e7e57e38a944753cec706f788d1768bb5"
node-pre-gyp@^0.6.36: node-pre-gyp@^0.6.36:
version "0.6.36" version "0.6.36"
resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786"
@ -1394,7 +1443,7 @@ on-finished@~2.3.0:
dependencies: dependencies:
ee-first "1.1.1" ee-first "1.1.1"
once@^1.3.0, once@^1.3.3: once@^1.3.0, once@^1.3.3, once@^1.4.0:
version "1.4.0" version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
dependencies: dependencies:
@ -1457,6 +1506,10 @@ path-to-regexp@0.1.7:
version "0.1.7" version "0.1.7"
resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
pathval@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0"
pause-stream@0.0.11: pause-stream@0.0.11:
version "0.0.11" version "0.0.11"
resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445"
@ -1483,6 +1536,14 @@ process-nextick-args@~1.0.6:
version "1.0.7" version "1.0.7"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3"
process-nextick-args@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
propagate@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/propagate/-/propagate-1.0.0.tgz#00c2daeedda20e87e3782b344adba1cddd6ad709"
proxy-addr@~1.1.5: proxy-addr@~1.1.5:
version "1.1.5" version "1.1.5"
resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-1.1.5.tgz#71c0ee3b102de3f202f3b64f608d173fcba1a918" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-1.1.5.tgz#71c0ee3b102de3f202f3b64f608d173fcba1a918"
@ -1508,7 +1569,7 @@ qs@6.5.0:
version "6.5.0" version "6.5.0"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.0.tgz#8d04954d364def3efc55b5a0793e1e2c8b1e6e49" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.0.tgz#8d04954d364def3efc55b5a0793e1e2c8b1e6e49"
qs@6.5.1: qs@6.5.1, qs@^6.5.1:
version "6.5.1" version "6.5.1"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"
@ -1545,6 +1606,18 @@ rc@^1.0.1, rc@^1.1.6, rc@^1.1.7:
minimist "^1.2.0" minimist "^1.2.0"
strip-json-comments "~2.0.1" strip-json-comments "~2.0.1"
readable-stream@^2.0.0, readable-stream@^2.3.5:
version "2.3.6"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4: readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4:
version "2.2.11" version "2.2.11"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.11.tgz#0796b31f8d7688007ff0b93a8088d34aa17c0f72" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.11.tgz#0796b31f8d7688007ff0b93a8088d34aa17c0f72"
@ -1592,6 +1665,12 @@ registry-url@^3.0.3:
dependencies: dependencies:
rc "^1.0.1" rc "^1.0.1"
relative@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/relative/-/relative-3.0.2.tgz#0dcd8ec54a5d35a3c15e104503d65375b5a5367f"
dependencies:
isobject "^2.0.0"
remove-trailing-separator@^1.0.1: remove-trailing-separator@^1.0.1:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.2.tgz#69b062d978727ad14dc6b56ba4ab772fd8d70511" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.2.tgz#69b062d978727ad14dc6b56ba4ab772fd8d70511"
@ -1649,6 +1728,12 @@ rimraf@2, rimraf@^2.5.1, rimraf@^2.6.1:
dependencies: dependencies:
glob "^7.0.5" glob "^7.0.5"
rimraf@^2.6.2:
version "2.6.2"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36"
dependencies:
glob "^7.0.5"
rx@2.3.24: rx@2.3.24:
version "2.3.24" version "2.3.24"
resolved "https://registry.yarnpkg.com/rx/-/rx-2.3.24.tgz#14f950a4217d7e35daa71bbcbe58eff68ea4b2b7" resolved "https://registry.yarnpkg.com/rx/-/rx-2.3.24.tgz#14f950a4217d7e35daa71bbcbe58eff68ea4b2b7"
@ -1657,6 +1742,10 @@ safe-buffer@^5.0.1:
version "5.1.0" version "5.1.0"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.0.tgz#fe4c8460397f9eaaaa58e73be46273408a45e223" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.0.tgz#fe4c8460397f9eaaaa58e73be46273408a45e223"
safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
safe-buffer@~5.0.1: safe-buffer@~5.0.1:
version "5.0.1" version "5.0.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7"
@ -1671,6 +1760,10 @@ semver@^5.0.3, semver@^5.1.0, semver@^5.3.0:
version "5.3.0" version "5.3.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
semver@^5.5.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab"
send@0.15.4: send@0.15.4:
version "0.15.4" version "0.15.4"
resolved "https://registry.yarnpkg.com/send/-/send-0.15.4.tgz#985faa3e284b0273c793364a35c6737bd93905b9" resolved "https://registry.yarnpkg.com/send/-/send-0.15.4.tgz#985faa3e284b0273c793364a35c6737bd93905b9"
@ -1710,9 +1803,9 @@ setprototypeof@1.0.3:
version "1.0.3" version "1.0.3"
resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04"
shelljs@^0.7.8: shelljs@^0.8.1:
version "0.7.8" version "0.8.1"
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.8.tgz#decbcf874b0d1e5fb72e14b164a9683048e9acb3" resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.1.tgz#729e038c413a2254c4078b95ed46e0397154a9f1"
dependencies: dependencies:
glob "^7.0.0" glob "^7.0.0"
interpret "^1.0.0" interpret "^1.0.0"
@ -1787,6 +1880,12 @@ string_decoder@~1.0.0:
dependencies: dependencies:
safe-buffer "~5.0.1" safe-buffer "~5.0.1"
string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
dependencies:
safe-buffer "~5.1.0"
stringstream@~0.0.4: stringstream@~0.0.4:
version "0.0.5" version "0.0.5"
resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
@ -1811,6 +1910,10 @@ strip-json-comments@~2.0.1:
version "2.0.1" version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
success-symbol@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/success-symbol/-/success-symbol-0.1.0.tgz#24022e486f3bf1cdca094283b769c472d3b72897"
superagent@^3.0.0: superagent@^3.0.0:
version "3.5.2" version "3.5.2"
resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.5.2.tgz#3361a3971567504c351063abeaae0faa23dbf3f8" resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.5.2.tgz#3361a3971567504c351063abeaae0faa23dbf3f8"
@ -1860,6 +1963,18 @@ tar-pack@^3.4.0:
tar "^2.2.1" tar "^2.2.1"
uid-number "^0.0.6" uid-number "^0.0.6"
tar-stream@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.0.tgz#a50efaa7b17760b82c27b3cae4a301a8254a5715"
dependencies:
bl "^1.0.0"
buffer-alloc "^1.1.0"
end-of-stream "^1.0.0"
fs-constants "^1.0.0"
readable-stream "^2.0.0"
to-buffer "^1.1.0"
xtend "^4.0.0"
tar@^2.2.1: tar@^2.2.1:
version "2.2.1" version "2.2.1"
resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1"
@ -1882,6 +1997,10 @@ timed-out@^4.0.0:
version "4.0.1" version "4.0.1"
resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f"
to-buffer@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80"
touch@^3.1.0: touch@^3.1.0:
version "3.1.0" version "3.1.0"
resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b" resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
@ -1939,6 +2058,10 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0:
version "0.14.5" version "0.14.5"
resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
type-detect@^4.0.0:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
type-is@~1.6.15: type-is@~1.6.15:
version "1.6.15" version "1.6.15"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.15.tgz#cab10fb4909e441c82842eafe1ad646c81804410" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.15.tgz#cab10fb4909e441c82842eafe1ad646c81804410"
@ -2047,7 +2170,7 @@ xdg-basedir@^3.0.0:
version "3.0.0" version "3.0.0"
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4"
xtend@^4.0.1: xtend@^4.0.0:
version "4.0.1" version "4.0.1"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"

View File

View File

@ -0,0 +1,12 @@
# Link the scripts on the host to the scripts in the container
# - the host scripts are mounted as a volume at `/dockerbuild`
# - the original scripts are moved to `..._prod` in case they are needed later
# See `aio/aio-builds-setup/docs/misc--debug-docker-container.md` for more info
mv $AIO_SCRIPTS_SH_DIR ${AIO_SCRIPTS_SH_DIR}_prod
ln -s /dockerbuild/scripts-sh $AIO_SCRIPTS_SH_DIR
chmod a+x $AIO_SCRIPTS_SH_DIR/*
mv $AIO_SCRIPTS_JS_DIR ${AIO_SCRIPTS_JS_DIR}_prod
ln -s /dockerbuild/scripts-js $AIO_SCRIPTS_JS_DIR

View File

@ -30,7 +30,7 @@ done
 # Check servers
 origins=(
-  http://$AIO_UPLOAD_HOSTNAME:$AIO_UPLOAD_PORT
+  http://$AIO_PREVIEW_SERVER_HOSTNAME:$AIO_PREVIEW_SERVER_PORT
   http://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTP
   https://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTPS
 )

aio/aio-builds-setup/dockerbuild/scripts-sh/init.sh Executable file → Normal file
View File

@ -14,5 +14,5 @@ service cron start
 service dnsmasq start
 service nginx start
 service pm2-root start
-aio-upload-server-prod start
+aio-preview-server-prod start
 echo [`date`] - Services started successfully.

View File

@ -0,0 +1,14 @@
#!/bin/bash
set -eu -o pipefail
# Set up env variables for production
export AIO_CIRCLE_CI_TOKEN=$(head -c -1 /aio-secrets/CIRCLE_CI_TOKEN 2>/dev/null || echo "MISSING_CIRCLE_CI_TOKEN")
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null || echo "MISSING_GITHUB_TOKEN")
# Start the preview-server instance
action=$([ "$1" == "stop" ] && echo "stop" || echo "start")
pm2 $action $AIO_SCRIPTS_JS_DIR/dist/lib/preview-server \
--uid $AIO_WWW_USER \
--log /var/log/aio/preview-server-prod.log \
--name aio-preview-server-prod \
${@:2}

View File

@ -0,0 +1,16 @@
#!/bin/bash
set -eu -o pipefail
# Start the preview-server instance
appName=aio-preview-server-test
if [[ "$1" == "stop" ]]; then
pm2 delete $appName
else
source aio-test-env
pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup/start-test-preview-server.js \
--uid $AIO_WWW_USER \
--log /var/log/aio/preview-server-test.log \
--name $appName \
--no-autorestart \
${@:2}
fi

View File

@ -0,0 +1,19 @@
# Set up env variables for testing
export AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME
export AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP
export AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS
export AIO_ARTIFACT_PATH=$TEST_AIO_ARTIFACT_PATH
export AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR
export AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME
export AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION
export AIO_GITHUB_REPO=$TEST_AIO_GITHUB_REPO
export AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS
export AIO_SIGNIFICANT_FILES_PATTERN=$TEST_AIO_SIGNIFICANT_FILES_PATTERN
export AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL
export AIO_PREVIEW_SERVER_HOSTNAME=$TEST_AIO_PREVIEW_SERVER_HOSTNAME
export AIO_PREVIEW_SERVER_PORT=$TEST_AIO_PREVIEW_SERVER_PORT
export AIO_ARTIFACT_MAX_SIZE=$TEST_AIO_ARTIFACT_MAX_SIZE
export AIO_CIRCLE_CI_TOKEN=TEST_CIRCLE_CI_TOKEN
export AIO_GITHUB_TOKEN=TEST_GITHUB_TOKEN

View File

@ -1,14 +0,0 @@
#!/bin/bash
set -eu -o pipefail
# Set up env variables for production
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null || echo "MISSING_GITHUB_TOKEN")
export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null || echo "MISSING_PREVIEW_DEPLOYMENT_TOKEN")
# Start the upload-server instance
action=$([ "$1" == "stop" ] && echo "stop" || echo "start")
pm2 $action $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server \
--uid $AIO_WWW_USER \
--log /var/log/aio/upload-server-prod.log \
--name aio-upload-server-prod \
${@:2}

View File

@ -1,28 +0,0 @@
#!/bin/bash
set -eu -o pipefail
# Set up env variables for testing
export AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR
export AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME
export AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION
export AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS
export AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG
export AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL
export AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME
export AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT
export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/TEST_GITHUB_TOKEN 2>/dev/null || echo "TEST_GITHUB_TOKEN")
export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/TEST_PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null || echo "TEST_PREVIEW_DEPLOYMENT_TOKEN")
# Start the upload-server instance
appName=aio-upload-server-test
if [[ "$1" == "stop" ]]; then
pm2 delete $appName
else
pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup/start-test-upload-server.js \
--uid $AIO_WWW_USER \
--log /var/log/aio/upload-server-test.log \
--name $appName \
--no-autorestart \
${@:2}
fi

View File

@ -0,0 +1,2 @@
aio-verify-setup
ls -t /var/log/aio/preview-server-verify* | head -1 | xargs cat

View File

@ -2,7 +2,7 @@
set -eu -o pipefail set -eu -o pipefail
logFile=/var/log/aio/verify-setup.log logFile=/var/log/aio/verify-setup.log
uploadServerLogFile=/var/log/aio/upload-server-verify-setup.log previewServerLogFile=/var/log/aio/preview-server-verify-setup.log
exec 3>&1 exec 3>&1
exec >> $logFile exec >> $logFile
@ -23,18 +23,22 @@ function countdown {
} }
function onExit { function onExit {
aio-upload-server-test stop echo -e "Stopping Test Server"
aio-preview-server-test stop
echo -e "Full logs in '$logFile'.\n" > /dev/fd/3 echo -e "Full logs in '$logFile'.\n" > /dev/fd/3
} }
# Setup EXIT trap # Setup EXIT trap
trap 'onExit' EXIT trap 'onExit' EXIT
# Start an upload-server instance for testing # Start a preview-server instance for testing
aio-upload-server-test start --log $uploadServerLogFile echo -e "Starting Test Server"
aio-preview-server-test start --log $previewServerLogFile
# Give the upload-server some time to start :( # Give the preview-server some time to start :(
countdown "Starting" 5 > /dev/fd/3 countdown "Starting" 5 > /dev/fd/3
# Run the tests # Run the tests
echo Running the tests
source aio-test-env
node $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup | tee /dev/fd/3 node $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup | tee /dev/fd/3

View File

@ -10,18 +10,26 @@ environment variables and their default values can be found in the
Each variable has a `TEST_` prefixed counterpart, which is used for testing purposes. In most cases Each variable has a `TEST_` prefixed counterpart, which is used for testing purposes. In most cases
you don't need to specify values for those. you don't need to specify values for those.
- `AIO_ARTIFACT_PATH`:
The path used to identify the AIO build artifact on the CircleCI servers. This should be equal to
the path given in the `.circleci/config.yml` file for the
`aio_preview->steps->store_artifacts->destination` key.
- `AIO_BUILDS_DIR`: - `AIO_BUILDS_DIR`:
The directory (inside the container) where the uploaded build artifacts are kept. The directory (inside the container) where the hosted build artifacts are kept.
- `AIO_DOMAIN_NAME`: - `AIO_DOMAIN_NAME`:
The domain name of the server. The domain name of the server.
- `AIO_GITHUB_ORGANIZATION`: - `AIO_GITHUB_ORGANIZATION`:
The GitHub organization whose teams are whitelisted for accepting uploads. The GitHub organization whose teams are whitelisted for accepting build artifacts.
See also `AIO_GITHUB_TEAM_SLUGS`. See also `AIO_GITHUB_TEAM_SLUGS`.
- `AIO_GITHUB_REPO`:
The GitHub repository for which PRs will be hosted.
- `AIO_GITHUB_TEAM_SLUGS`: - `AIO_GITHUB_TEAM_SLUGS`:
A comma-separated list of teams, whose authors are allowed to upload PRs. A comma-separated list of teams, whose authors are allowed to preview PRs.
See also `AIO_GITHUB_ORGANIZATION`. See also `AIO_GITHUB_ORGANIZATION`.
- `AIO_NGINX_HOSTNAME`: - `AIO_NGINX_HOSTNAME`:
@ -36,22 +44,24 @@ you don't need to specify values for those.
The port number on which nginx listens for HTTPS connections. This should be mapped to the The port number on which nginx listens for HTTPS connections. This should be mapped to the
corresponding port on the host VM (as described [here](vm-setup--start-docker-container.md)). corresponding port on the host VM (as described [here](vm-setup--start-docker-container.md)).
- `AIO_REPO_SLUG`: - `AIO_SIGNIFICANT_FILES_PATTERN`:
The repository slug (in the form `<user>/<repo>`) for which PRs will be uploaded. The RegExp that determines whether a changed file indicates that a new preview needs to
be deployed. For example, if there is a changed file in the `/packages` directory then
some of the API docs might have changed, so we need to create a new preview.
- `AIO_TRUSTED_PR_LABEL`: - `AIO_TRUSTED_PR_LABEL`:
The PR label whose presence indicates the PR has been manually verified and is allowed to have its The PR label whose presence indicates the PR has been manually verified and is allowed to have its
build artifacts publicly served. This is useful for enabling previews for any PR (not only those build artifacts publicly served. This is useful for enabling previews for any PR (not only those
from trusted authors). from trusted authors).
- `AIO_UPLOAD_HOSTNAME`: - `AIO_PREVIEW_SERVER_HOSTNAME`:
The internal hostname for accessing the Node.js upload-server. This is used by nginx for The internal hostname for accessing the Node.js preview-server. This is used by nginx for
delegating upload requests and also for performing a periodic health-check. delegating web-hook requests and also for performing a periodic health-check.
- `AIO_UPLOAD_MAX_SIZE`: - `AIO_ARTIFACT_MAX_SIZE`:
The maximum allowed size for the uploaded gzip archive containing the build artifacts. Files The maximum allowed size for the gzip archive containing the build artifacts.
larger than this will be rejected. Files larger than this will be rejected.
- `AIO_UPLOAD_PORT`: - `AIO_PREVIEW_SERVER_PORT`:
The port number on which the Node.js upload-server listens for HTTP connections. This is used by The port number on which the Node.js preview-server listens for HTTP connections. This is used by
nginx for delegating upload requests and also for performing a periodic health-check. nginx for delegating web-hook requests and also for performing a periodic health-check.
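To make the role of the variables above more concrete, here is a minimal TypeScript sketch of how a Node.js process could consume `AIO_SIGNIFICANT_FILES_PATTERN` and `AIO_ARTIFACT_MAX_SIZE`. This is not the preview-server's actual code; `getEnvVar`, `affectsPreview` and `isArtifactTooLarge` are hypothetical names used only for illustration.

```ts
// Hypothetical sketch: consuming two of the variables documented above.

function getEnvVar(name: string): string {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
  return value;
}

// RegExp that decides whether a changed file warrants creating a new preview.
const significantFilesRe = new RegExp(getEnvVar('AIO_SIGNIFICANT_FILES_PATTERN'));

// Maximum allowed size (in bytes) of the downloaded build artifact.
const artifactMaxSize = +getEnvVar('AIO_ARTIFACT_MAX_SIZE');

export function affectsPreview(changedFiles: string[]): boolean {
  return changedFiles.some(file => significantFilesRe.test(file));
}

export function isArtifactTooLarge(sizeInBytes: number): boolean {
  return sizeInBytes > artifactMaxSize;
}
```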

View File

@ -5,8 +5,44 @@ TODO (gkalpak): Add docs. Mention:
- `aio-health-check` - `aio-health-check`
- `aio-verify-setup` - `aio-verify-setup`
- Test nginx accessible at: - Test nginx accessible at:
- `http://$TEST_AIO_NGINX_HOTNAME:$TEST_AIO_NGINX_PORT_HTTP` - `http://$TEST_AIO_NGINX_HOSTNAME:$TEST_AIO_NGINX_PORT_HTTP`
- `https://$TEST_AIO_NGINX_HOTNAME:$TEST_AIO_NGINX_PORT_HTTPS` - `https://$TEST_AIO_NGINX_HOSTNAME:$TEST_AIO_NGINX_PORT_HTTPS`
- Test upload-server accessible at: - Test preview-server accessible at:
- `http://$TEST_AIO_UPLOAD_HOTNAME:$TEST_AIO_UPLOAD_PORT` - `http://$TEST_AIO_PREVIEW_SERVER_HOSTNAME:$TEST_AIO_PREVIEW_SERVER_PORT`
- Local DNS (via dnsmasq) maps the above hostnames to 127.0.0.1 - Local DNS (via dnsmasq) maps the above hostnames to 127.0.0.1
## Developing the preview server TypeScript files
If you are running Docker on OS X, you can benefit from linking the built TypeScript
files (i.e. `scripts-js/dist`) to the JavaScript files inside the Docker container.
First start watching and building the TypeScript files (in the host):
```bash
yarn build-watch
```
Now build, start and attach to the Docker container. See "Setting up the VM"
section in [TOC](_TOC.md). Then link the JavaScript folders (in the container):
```bash
aio-dev-mode
```
Now, whenever you make changes to the TypeScript files, they will be automatically rebuilt
on the host, and the changes will be automatically available in the container.
You can then run the unit tests (in the container):
```bash
aio-verify-setup
```
Sometimes, the errors in the unit test log are not enough to tell you what went wrong.
In that case you can also look at the log of the preview-server itself.
A helper script that runs the unit tests (i.e. `aio-verify-setup`) and displays the
last relevant test-preview-server log is:
```bash
aio-verify-setup-and-log
```

View File

@ -2,10 +2,7 @@
TODO (gkalpak): Add docs. Mention: TODO (gkalpak): Add docs. Mention:
- Travis' JWT addon (+ limitations).
Relevant files: `.travis.yml`, `scripts/ci/env.sh`
- Testing on CI. - Testing on CI.
Relevant files: `scripts/ci/test-aio.sh`, `aio/aio-builds-setup/scripts/test.sh` Relevant files: `scripts/ci/test-aio.sh`, `aio/aio-builds-setup/scripts/test.sh`
- Deploying from CI. - Deploying from CI.
Relevant files: `scripts/ci/deploy.sh`, `aio/scripts/deploy-preview.sh`, Relevant files: `scripts/ci/deploy.sh`, `aio/scripts/deploy-to-firebase.sh`
`aio/scripts/deploy-to-firebase.sh`

View File

@ -2,9 +2,10 @@
## Objective ## Objective
Whenever a PR job is run on Travis, we want to build `angular.io` and upload the build artifacts to Whenever a PR job is run on the CI infrastructure (e.g. CircleCI), we want to build `angular.io`
a publicly accessible server so that collaborators (developers, designers, authors, etc) can preview and host the build artifacts on a publicly accessible server so that collaborators (developers,
the changes without having to checkout and build the app locally. designers, authors, etc) can preview the changes without having to checkout and build the app
locally.
## Source code ## Source code
@ -32,48 +33,38 @@ This section gives a brief summary of the several operations performed on CI and
container: container:
### On CI (Travis) ### On CI (CircleCI)
- Build job completes successfully. - Build job completes successfully.
- The CI script checks whether the build job was initiated by a PR against the angular/angular - The CI script checks whether the build job was initiated by a PR against the angular/angular
master branch. master branch.
- The CI script checks whether the PR has touched any files that might affect the angular.io app - The CI script checks whether the PR has touched any files that might affect the angular.io app
(currently the `aio/` or `packages/` directories, ignoring spec files). (currently the `aio/` or `packages/` directories, ignoring spec files).
- Optionally, the CI script can check whether the PR can be automatically verified (i.e. if the - The CI script gzips and stores the build artifacts in the CI infrastructure.
author of the PR is a member of one of the whitelisted GitHub teams or the PR has the specified - When the build completes CircleCI triggers a webhook on the preview-server.
"trusted PR" label).
**Note:**
For security reasons, the same checks will be performed on the server as well. This is an optional
step that can be used in case one wants to apply special logic depending on the outcome of the
pre-verification. For example:
1. One might want to deploy automatically verified PRs only. In that case, the pre-verification
helps avoid the wasted overhead associated with uploads that are going to be rejected (e.g.
building the artifacts, sending them to the server, running checks on the server, detecting the
reasons of deployment failure and whether to fail the build, etc).
2. One might want to apply additional logic (e.g. different tests) depending on whether the PR is
automatically verified or not).
- The CI script gzips and uploads the build artifacts to the server.
More info on how to set things up on CI can be found [here](misc--integrate-with-ci.md). More info on how to set things up on CI can be found [here](misc--integrate-with-ci.md).
### Uploading build artifacts ### Hosting build artifacts
- nginx receives the upload request.
- nginx checks that the uploaded gzip archive does not exceed the specified max file size, stores it - nginx receives the webhook trigger and passes it through to the preview server.
in a temporary location and passes the filepath to the Node.js upload-server. - The preview-server makes a request to CircleCI for the URL of the AIO build artifacts.
- The upload-server runs several checks to determine whether the request should be accepted and - The preview-server makes a request to this URL to receive the artifact - failing if the size
exceeds the specified max file size - and stores it in a temporary location.
- The preview-server runs several checks to determine whether the request should be accepted and
whether it should be publicly accessible or stored for later verification (more details can be whether it should be publicly accessible or stored for later verification (more details can be
found [here](overview--security-model.md)). found [here](overview--security-model.md)).
- The upload-server changes the "visibility" of the associated PR, if necessary. For example, if - The preview-server changes the "visibility" of the associated PR, if necessary. For example, if
builds for the same PR had been previously deployed as non-public and the current build has been builds for the same PR had been previously deployed as non-public and the current build has been
automatically verified, all previous builds are made public as well. automatically verified, all previous builds are made public as well.
If the PR transitions from "non-public" to "public", the upload-server posts a comment on the If the PR transitions from "non-public" to "public", the preview-server posts a comment on the
corresponding PR on GitHub mentioning the SHAs and the links where the previews can be found. corresponding PR on GitHub mentioning the SHAs and the links where the previews can be found.
- The upload-server verifies that the uploaded file is not trying to overwrite an existing build. - The preview-server verifies that it is not trying to overwrite an existing build.
- The upload-server deploys the artifacts to a sub-directory named after the PR number and the first - The preview-server deploys the artifacts to a sub-directory named after the PR number and the first
few characters of the SHA: `<PR>/<SHA>/` few characters of the SHA: `<PR>/<SHA>/`
(Non-publicly accessible PRs will be stored in a different location, but again derived from the PR (Non-publicly accessible PRs will be stored in a different location, but again derived from the PR
number and SHA.) number and SHA.)
- If the PR is publicly accessible, the upload-server posts a comment on the corresponding PR on - If the PR is publicly accessible, the preview-server posts a comment on the corresponding PR on
GitHub mentioning the SHA and the link where the preview can be found. GitHub mentioning the SHA and the link where the preview can be found.
More info on the possible HTTP status codes and their meaning can be found More info on the possible HTTP status codes and their meaning can be found
@ -82,24 +73,24 @@ More info on the possible HTTP status codes and their meaning can be found
### Updating PR visibility ### Updating PR visibility
- nginx receives a notification that a PR has been updated and passes it through to the
upload-server. This could, for example, be sent by a GitHub webhook every time a PR's labels preview-server. This could, for example, be sent by a GitHub webhook every time a PR's labels
change. change.
E.g.: `ngbuilds.io/pr-updated` (payload: `{"number":<PR>,"action":"labeled"}`) E.g.: `ngbuilds.io/pr-updated` (payload: `{"number":<PR>,"action":"labeled"}`)
- The request contains the PR number (as `number`) and optionally the action that triggered the - The request contains the PR number (as `number`) and optionally the action that triggered the
request (as `action`) in the payload. request (as `action`) in the payload.
- The upload-server verifies the payload and determines whether the `action` (if specified) could - The preview-server verifies the payload and determines whether the `action` (if specified) could
have led to PR visibility changes. Only requests that omit the `action` field altogether or have led to PR visibility changes. Only requests that omit the `action` field altogether or
specify an action that can affect visibility are further processed. specify an action that can affect visibility are further processed.
(Currently, the only actions that are considered capable of affecting visibility are `labeled` and (Currently, the only actions that are considered capable of affecting visibility are `labeled` and
`unlabeled`.) `unlabeled`.)
- The upload-server re-checks and if necessary updates the PR's visibility. - The preview-server re-checks and if necessary updates the PR's visibility.
More info on the possible HTTP status codes and their meaning can be found More info on the possible HTTP status codes and their meaning can be found
[here](overview--http-status-codes.md). [here](overview--http-status-codes.md).
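The following TypeScript sketch illustrates the `/pr-updated` handling described above. It is not the preview-server's implementation: the real endpoint is served by an Express app, `updatePrVisibility` is a hypothetical stand-in, and the status codes are simplified.

```ts
import { createServer } from 'node:http';

// Hypothetical stand-in for the real visibility re-check/update logic.
async function updatePrVisibility(prNumber: number): Promise<void> {
  console.log(`Re-checking (and, if necessary, updating) visibility of PR #${prNumber}.`);
}

// Only these actions are considered capable of affecting a PR's visibility.
const VISIBILITY_ACTIONS = new Set(['labeled', 'unlabeled']);

createServer((req, res) => {
  if (req.method !== 'POST' || req.url !== '/pr-updated') {
    res.statusCode = 404;
    res.end();
    return;
  }

  let body = '';
  req.on('data', chunk => body += chunk);
  req.on('end', async () => {
    try {
      // Expected payload: `{"number": <PR>, "action": "labeled"}` (`action` is optional).
      const {number, action} = JSON.parse(body) as {number?: number, action?: string};

      if (!number) {
        res.statusCode = 400;
        res.end('Missing PR number.');
      } else if (action && !VISIBILITY_ACTIONS.has(action)) {
        // The action cannot affect visibility, so there is nothing to do.
        res.statusCode = 200;
        res.end();
      } else {
        await updatePrVisibility(number);
        res.statusCode = 200;
        res.end();
      }
    } catch {
      res.statusCode = 400;
      res.end('Invalid payload.');
    }
  });
}).listen(+(process.env.AIO_PREVIEW_SERVER_PORT || 3000));
```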
### Serving build artifacts ### Serving build artifacts
- nginx receives a request for an uploaded resource on a subdomain corresponding to the PR and SHA. - nginx receives a request for a hosted preview resource on a subdomain corresponding to the PR and SHA.
E.g.: `pr<PR>-<SHA>.ngbuilds.io/path/to/resource` E.g.: `pr<PR>-<SHA>.ngbuilds.io/path/to/resource`
- nginx maps the subdomain to the correct sub-directory and serves the resource. - nginx maps the subdomain to the correct sub-directory and serves the resource.
E.g.: `/<PR>/<SHA>/path/to/resource` E.g.: `/<PR>/<SHA>/path/to/resource`
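The subdomain-to-directory mapping above is performed entirely by the nginx configuration; the small TypeScript sketch below only illustrates the relationship (the builds directory and the SHA format are assumptions for illustration):

```ts
// Illustration only: in the real setup, nginx performs this mapping itself.
const BUILDS_DIR = '/var/www/aio-builds';  // assumed location of the hosted builds

// `pr<PR>-<SHA>.ngbuilds.io/path/to/resource` --> `<BUILDS_DIR>/<PR>/<SHA>/path/to/resource`
export function resolvePreviewPath(host: string, urlPath: string): string | null {
  const match = /^pr(\d+)-([0-9a-f]+)\./i.exec(host);
  if (!match) {
    return null;  // Not a preview subdomain.
  }
  const [, pr, sha] = match;
  return `${BUILDS_DIR}/${pr}/${sha}${urlPath}`;
}

// e.g. resolvePreviewPath('pr12345-a1b2c3d.ngbuilds.io', '/index.html')
//      --> '/var/www/aio-builds/12345/a1b2c3d/index.html'
```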
@ -117,4 +108,4 @@ that do not correspond with an open PR.
### Health-check ### Health-check
The docker service runs a periodic health-check that verifies the running conditions of the The docker service runs a periodic health-check that verifies the running conditions of the
container. This includes verifying the status of specific system services, the responsiveness of container. This includes verifying the status of specific system services, the responsiveness of
nginx and the upload-server and internet connectivity. nginx and the preview-server and internet connectivity.
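The health-check itself is implemented by the container's own scripts; the TypeScript sketch below only illustrates the kind of responsiveness checks described above (it assumes a global `fetch`, i.e. Node.js 18+, and ignores the self-signed certificate case):

```ts
// Illustration only: the container runs its own periodic health-check script.
const origins = [
  `http://${process.env.AIO_PREVIEW_SERVER_HOSTNAME}:${process.env.AIO_PREVIEW_SERVER_PORT}`,
  `http://${process.env.AIO_NGINX_HOSTNAME}:${process.env.AIO_NGINX_PORT_HTTP}`,
];

async function isResponsive(origin: string): Promise<boolean> {
  try {
    const response = await fetch(`${origin}/health-check`);
    return response.ok;
  } catch {
    return false;  // Connection refused, DNS failure, etc.
  }
}

export async function runHealthCheck(): Promise<boolean> {
  const results = await Promise.all(origins.map(isResponsive));
  origins.forEach((origin, i) =>
      console.log(`${origin}: ${results[i] ? 'OK' : 'NOT RESPONDING'}`));
  return results.every(ok => ok);
}
```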

View File

@ -1,8 +1,8 @@
# Overview - HTTP Status Codes # Overview - HTTP Status Codes
This is a list of all the possible HTTP status codes returned by the nginx anf upload servers, along This is a list of all the possible HTTP status codes returned by the nginx and preview servers, along
with a bried explanation of what they mean: with a brief explanation of what they mean:
## `http://*.ngbuilds.io/*` ## `http://*.ngbuilds.io/*`
@ -25,7 +25,7 @@ with a bried explanation of what they mean:
File not found. File not found.
## `https://ngbuilds.io/create-build/<pr>/<sha>` ## `https://ngbuilds.io/circle-build`
- **201 (Created)**: - **201 (Created)**:
Build deployed successfully and is publicly available. Build deployed successfully and is publicly available.
@ -33,14 +33,14 @@ with a bried explanation of what they mean:
- **202 (Accepted)**: - **202 (Accepted)**:
Build not automatically verifiable. Stored for later deployment (after re-verification). Build not automatically verifiable. Stored for later deployment (after re-verification).
- **400 (Bad Request)**: - **204 (No Content)**:
No payload. Build was not successful, so no further action is being taken.
- **401 (Unauthorized)**: - **400 (Bad Request)**:
No `AUTHORIZATION` header. Invalid payload.
- **403 (Forbidden)**: - **403 (Forbidden)**:
Unable to verify build (e.g. invalid JWT token, or unable to talk to 3rd-party APIs, etc). Unable to talk to 3rd-party APIs.
- **405 (Method Not Allowed)**: - **405 (Method Not Allowed)**:
Request method other than POST. Request method other than POST.
@ -49,9 +49,6 @@ with a bried explanation of what they mean:
Request to overwrite existing (public or non-public) directory (e.g. deploy existing build or Request to overwrite existing (public or non-public) directory (e.g. deploy existing build or
change PR visibility when the destination directory does already exist). change PR visibility when the destination directory does already exist).
- **413 (Payload Too Large)**:
Payload larger than size specified in `AIO_UPLOAD_MAX_SIZE`.
## `https://ngbuilds.io/health-check` ## `https://ngbuilds.io/health-check`

View File

@ -21,7 +21,7 @@ available:
from a git repository. See [here](vm-setup--update-docker-container.md) for more info. from a git repository. See [here](vm-setup--update-docker-container.md) for more info.
## Commands ## Production Commands
The following commands are available globally from inside the docker container. They are either used The following commands are available globally from inside the docker container. They are either used
by the container to perform its various operations or can be used ad-hoc, mainly for testing by the container to perform its various operations or can be used ad-hoc, mainly for testing
purposes. Each command is backed by a corresponding script inside purposes. Each command is backed by a corresponding script inside
@ -40,14 +40,27 @@ purposes. Each command is backed by a corresponding script inside
Initializes the container (mainly by starting the necessary services). Initializes the container (mainly by starting the necessary services).
_It is run (by default) when starting the container._ _It is run (by default) when starting the container._
- `aio-upload-server-prod`: - `aio-preview-server-prod`:
Spins up a Node.js upload-server instance. Spins up a Node.js preview-server instance.
_It is used in `aio-init` (see above) during initialization._ _It is used in `aio-init` (see above) during initialization._
- `aio-upload-server-test`:
Spins up a Node.js upload-server instance for tests. ## Developer Commands
- `aio-preview-server-test`:
Spins up a Node.js preview-server instance for tests.
_It is used in `aio-verify-setup` (see below) for running tests._ _It is used in `aio-verify-setup` (see below) for running tests._
- `aio-verify-setup`: - `aio-verify-setup`:
Runs a suite of e2e-like tests, mainly verifying the correct (inter)operation of nginx and the Runs a suite of e2e-like tests, mainly verifying the correct (inter)operation of nginx and the
Node.js upload-server. Node.js preview-server.
- `aio-verify-setup-and-log`:
Runs the `aio-verify-setup` command and then dumps the logs from the preview server, which
gives additional useful debugging information. See the [debugging docs](misc--debug-docker-container.md)
for more info.
- `aio-dev-mode`:
Links external source files (from the Docker host) to internal source files (in the Docker
container). This makes it easier to use an IDE to edit files in the host that are then
tested in the container. See the [debugging docs](misc--debug-docker-container.md) for more info.

View File

@ -1,27 +1,27 @@
# Overview - Security model # Overview - Security model
Whenever a PR job is run on Travis, we want to build `angular.io` and upload the build artifacts to Whenever a PR job is run on CircleCI, we want to build `angular.io` and host the build artifacts on
a publicly accessible server so that collaborators (developers, designers, authors, etc) can preview a publicly accessible server so that collaborators (developers, designers, authors, etc) can preview
the changes without having to checkout and build the app locally. the changes without having to checkout and build the app locally.
This document discusses the security considerations associated with uploading build artifacts as This document discusses the security considerations associated with moving build artifacts as
part of the CI setup and serving them publicly. part of the CI process and serving them publicly.
## Security objectives ## Security objectives
- **Prevent uploading arbitrary content to our servers.** - **Prevent hosting arbitrary content on our servers.**
Since there is no restriction on who can submit a PR, we cannot allow any PR's build artifacts to Since there is no restriction on who can submit a PR, we cannot allow arbitrary untrusted PRs'
be uploaded. build artifacts to be hosted.
- **Prevent overwriting other peoples uploaded content.** - **Prevent overwriting other people's hosted build artifacts.**
There needs to be a mechanism in place to ensure that the uploaded content does indeed correspond There needs to be a mechanism in place to ensure that the hosted content does indeed correspond
to the PR indicated by its URL. to the PR indicated by its URL.
- **Prevent arbitrary access on the server.** - **Prevent arbitrary access on the server.**
Since the PR author has full access over the build artifacts that would be uploaded, we must Since the PR author has full access over the build artifacts that would be hosted, we must
ensure that the uploaded files will not enable arbitrary access to the server or expose sensitive ensure that the build artifacts will not have arbitrary access to the server or expose sensitive
info. info.
@ -30,7 +30,7 @@ part of the CI setup and serving them publicly.
- Because the PR author can change the scripts run on CI, any security mechanisms must be immune to - Because the PR author can change the scripts run on CI, any security mechanisms must be immune to
such changes. such changes.
- For security reasons, encrypted Travis variables are not available to PRs, so we can't rely on - For security reasons, encrypted CircleCI variables are not available to PRs, so we can't rely on
them to implement security. them to implement security.
@ -40,35 +40,42 @@ part of the CI setup and serving them publicly.
### In a nutshell ### In a nutshell
The implemented approach can be broken up into the following sub-tasks: The implemented approach can be broken up into the following sub-tasks:
1. Verify which PR the uploaded artifacts correspond to. 0. Receive notification from CircleCI of a completed build.
1. Verify that the build is valid and download the artifact.
2. Fetch the PR's metadata, including author and labels. 2. Fetch the PR's metadata, including author and labels.
3. Check whether the PR can be automatically verified as "trusted" (based on its author or labels). 3. Check whether the PR can be automatically verified as "trusted" (based on its author or labels).
4. If necessary, update the corresponding PR's verification status. 4. If necessary, update the corresponding PR's verification status.
5. Deploy the artifacts to the corresponding PR's directory. 5. Deploy the artifacts to the corresponding PR's directory.
6. Prevent overwriting previously deployed artifacts (which ensures that the guarantees established 6. Prevent overwriting previously deployed artifacts (which ensures that the guarantees established
during deployment will remain valid until the artifacts are removed). during deployment will remain valid until the artifacts are removed).
7. Prevent uploaded files from accessing anything outside their directory. 7. Prevent hosted preview files from accessing anything outside their directory.
### Implementation details ### Implementation details
This section describes how each of the aforementioned sub-tasks is accomplished: This section describes how each of the aforementioned sub-tasks is accomplished:
1. **Verify which PR the uploaded artifacts correspond to.** 0. **Receive notification from CircleCI of a completed build**
We are taking advantage of Travis' [JWT addon](https://docs.travis-ci.com/user/jwt). By sharing CircleCI is configured to trigger a webhook on our preview-server whenever a build completes.
a secret between Travis (which keeps it private but uses it to sign a JWT) and the server (which The payload contains the number of the build that completed.
uses it to verify the authenticity of the JWT), we can accomplish the following:
a. Verify that the upload request comes from Travis.
b. Determine the PR that these artifacts correspond to (since Travis puts that information into
the JWT, without the PR author being able to modify it).
_Note:_ 1. **Verify that the build is valid and download the artifact.**
_There are currently certain limitation in the implementation of the JWT addon._
_See the next section for more details._ We cannot trust that the data in the webhook trigger is authentic, so we only extract the build
number and then run a direct query against the CircleCI API to get hold of the real data for
the given build number.
If the build was not successful then we ignore this trigger. Otherwise we check that the
associated GitHub organization and repository are what we expect (e.g. angular/angular).
Next we make another call to the CircleCI API to get a list of the URLs for artifacts of that
build. If there is one that matches the configured artifact path, we download the contents of the
build artifact and store it in a local folder. This download has a maximum size limit to prevent
PRs from producing artifacts that are so large they would cause the preview server to crash.
2. **Fetch the PR's metadata, including author and labels**. 2. **Fetch the PR's metadata, including author and labels**.
Once we have securely associated the uploaded artifacts to a PR, we retrieve the PR's metadata - Once we have securely downloaded the artifact for a build, we retrieve the PR's metadata -
including the author's username and the labels - using the including the author's username and the labels - using the
[GitHub API](https://developer.github.com/v3/). [GitHub API](https://developer.github.com/v3/).
To avoid rate-limit restrictions, we use a Personal Access Token (issued by To avoid rate-limit restrictions, we use a Personal Access Token (issued by
@ -91,48 +98,42 @@ This section describes how each of the aforementioned sub-tasks is accomplished:
Once we have determined whether the PR is considered "trusted", we update its "visibility" (i.e. Once we have determined whether the PR is considered "trusted", we update its "visibility" (i.e.
whether it is publicly accessible or not), based on the new verification status. For example, if whether it is publicly accessible or not), based on the new verification status. For example, if
a PR was initially considered "not trusted" but the check triggered by a new build determined a PR was initially considered "not trusted" but the check triggered by a new build determined
otherwise, the PR (and all the previously uploaded previews) are made public. It works the same otherwise, the PR (and all the previously hosted previews) are made public. It works the same
way if a PR has gone from "trusted" to "not trusted". way if a PR has gone from "trusted" to "not trusted".
5. **Deploy the artifacts to the corresponding PR's directory.** 5. **Deploy the artifacts to the corresponding PR's directory.**
With the preceding steps, we have verified that the uploaded artifacts have been uploaded by With the preceding steps, we have verified that the build artifacts are valid.
Travis. Additionally, we have determined whether the PR can be trusted to have its previews Additionally, we have determined whether the PR can be trusted to have its previews
publicly accessible or whether further verification is necessary. The artifacts will be stored to publicly accessible or whether further verification is necessary. The artifacts will be stored to
the PR's directory, but will not be publicly accessible unless the PR has been verified. the PR's directory, but will not be publicly accessible unless the PR has been verified.
Essentially, as long as sub-tasks 1, 2 and 3 can be securely accomplished, it is possible to Essentially, as long as sub-tasks 1, 2 and 3 can be securely accomplished, it is possible to
"project" the trust we have in a team's members through the PR and Travis to the build artifacts. "project" the trust we have in a team's members through the PR to the build artifacts.
6. **Prevent overwriting previously deployed artifacts**. 6. **Prevent overwriting previously deployed artifacts**.
In order to enforce this restriction (and ensure that the deployed artifacts' validity is In order to enforce this restriction (and ensure that the deployed artifacts' validity is
preserved throughout their "lifetime"), the server that handles the upload (currently a Node.js preserved throughout their "lifetime"), the server that handles the artifacts (currently a Node.js
Express server) rejects uploads that target an existing directory. Express server) rejects builds that have already been handled.
_Note: A PR can contain multiple uploads; one for each SHA that was built on Travis._ _Note: A PR can contain multiple builds; one for each SHA that was built on CircleCI._
7. **Prevent uploaded files from accessing anything outside their directory.** 7. **Prevent hosted preview files from accessing anything outside their directory.**
Nginx (which is used to serve the uploaded artifacts) has been configured to not follow symlinks Nginx (which is used to serve the hosted preview) has been configured to not follow symlinks
outside of the directory where the build artifacts are stored. outside of the directory where the preview files are stored.
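As a rough illustration of steps 0 and 1 above, here is a TypeScript sketch. It is not the preview-server's implementation: the `BuildInfo` shape and the two helper functions are simplified assumptions standing in for authenticated calls to the CircleCI API (made with the `CIRCLE_CI_TOKEN` secret).

```ts
// Simplified, assumed shape of the build information returned by CircleCI.
interface BuildInfo {
  successful: boolean;
  org: string;
  repo: string;
}

// Stand-ins for authenticated calls to the CircleCI API; implementations omitted.
async function getBuildInfo(buildNumber: number): Promise<BuildInfo> {
  throw new Error('Sketch only: query the CircleCI API for the given build.');
}
async function downloadArtifact(buildNumber: number, artifactPath: string,
                                maxSizeInBytes: number): Promise<Buffer> {
  throw new Error('Sketch only: find the artifact matching `artifactPath` and ' +
                  'download it, rejecting anything larger than `maxSizeInBytes`.');
}

export async function fetchVerifiedArtifact(buildNumber: number): Promise<Buffer | null> {
  // Only the build number from the webhook payload is used (as a lookup key);
  // the real build data is re-fetched directly from the CircleCI API.
  const build = await getBuildInfo(buildNumber);

  if (!build.successful) {
    return null;  // Ignore triggers for unsuccessful builds.
  }
  if (build.org !== process.env.AIO_GITHUB_ORGANIZATION ||
      build.repo !== process.env.AIO_GITHUB_REPO) {
    throw new Error('Unexpected GitHub organization/repository for this build.');
  }

  // Download the artifact matching `AIO_ARTIFACT_PATH`, honoring `AIO_ARTIFACT_MAX_SIZE`.
  return downloadArtifact(buildNumber,
                          process.env.AIO_ARTIFACT_PATH || '',
                          +(process.env.AIO_ARTIFACT_MAX_SIZE || 0));
}
```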
## Assumptions / Things to keep in mind ## Assumptions / Things to keep in mind
- Each trusted PR author has full control over the content that is uploaded for their PRs. Part of - Other than the initial webhook trigger, which provides a build number, all requests for data come
the security model relies on the trustworthiness of these authors. from the preview-server making requests to well-defined API endpoints (e.g. CircleCI and GitHub).
This means that any secret access keys need only be stored on the preview-server and not on any of
the CI build infrastructure (e.g. CircleCI).
- Adding the specified label on a PR and marking it as trusted, gives the author full control over - Each trusted PR author has full control over the content that is hosted as a preview for their PRs.
the content that is uploaded for the specific PR (e.g. by pushing more commits to it). The user Part of the security model relies on the trustworthiness of these authors.
adding the label is responsible for ensuring that this control is not abused and that the PR is
either closed (one way of another) or the access is revoked.
- If anyone gets access to the `PREVIEW_DEPLOYMENT_TOKEN` (a.k.a. `NGBUILDS_IO_KEY` on - Adding the specified label on a PR to mark it as trusted, gives the author full control over
angular/angular) variable generated for each Travis job, they will be able to impersonate the the content that is hosted for the specific PR preview (e.g. by pushing more commits to it).
corresponding PR's author on the preview server for as long as the token is valid (currently 90 The user adding the label is responsible for ensuring that this control is not abused and that
mins). Because of this, the value of the `PREVIEW_DEPLOYMENT_TOKEN` should not be made publicly the PR is either closed (one way or another) or the access is revoked.
accessible (e.g. by printing it on the Travis job log).
- Travis does only allow specific whitelisted property names to be used with the JWT addon. The only
known such property at the time is `SAUCE_ACCESS_KEY` (used for integration with SauceLabs). In
order to be able to actually use the JWT addon we had to name the encrypted variable
`SAUCE_ACCESS_KEY` (which we later re-assign to `NGBUILDS_IO_KEY`).

View File

@ -1,6 +1,12 @@
# VM setup - Create docker image # VM setup - Create docker image
## Install node and yarn
- Install [nvm](https://github.com/creationix/nvm#installation).
- Install node.js: `nvm install 8`
- Install yarn: `npm -g install yarn`
## Checkout repository ## Checkout repository
- `git clone <repo-url>` - `git clone <repo-url>`
@ -21,7 +27,7 @@ The following commands would create a docker image from GitHub repo `foo/bar` to
- `git clone https://github.com/foo/bar.git foobar` - `git clone https://github.com/foo/bar.git foobar`
- Run: - Run:
``` ```
./foobar/aio-builds-setup/scripts/build.sh foobar-builds \ ./foobar/aio-builds-setup/scripts/create-image.sh foobar-builds \
--build-arg AIO_REPO_SLUG=foo/bar \ --build-arg AIO_REPO_SLUG=foo/bar \
--build-arg AIO_DOMAIN_NAME=foobar-builds.io \ --build-arg AIO_DOMAIN_NAME=foobar-builds.io \
--build-arg AIO_GITHUB_ORGANIZATION=foo \ --build-arg AIO_GITHUB_ORGANIZATION=foo \

View File

@ -12,8 +12,8 @@ More info on how to create `secrets` directory and files can be found
## Create directory for build artifacts ## Create directory for build artifacts
The uploaded build artifacts should be kept in a directory outside the docker container, so it is The build artifacts should be kept in a directory outside the docker container, so it is
easier to replace the container without losing the uploaded builds. For portability across VMs a easier to replace the container without losing the builds. For portability across VMs a
persistent disk can be used (as described [here](vm-setup--attach-persistent-disk.md)). persistent disk can be used (as described [here](vm-setup--attach-persistent-disk.md)).
**Note:** The directories created inside that directory will be owned by user `www-data`. **Note:** The directories created inside that directory will be owned by user `www-data`.
@ -21,7 +21,7 @@ persistent disk can be used (as described [here](vm-setup--attach-persistent-dis
## Create SSL certificates (Optional for dev) ## Create SSL certificates (Optional for dev)
The host VM can attach a directory containing the SSL certificate and key to be used by the nginx The host VM can attach a directory containing the SSL certificate and key to be used by the nginx
server for serving the uploaded build artifacts. More info on how to attach the directory when server for serving the hosted previews. More info on how to attach the directory when
starting the container can be found [here](vm-setup--start-docker-container.md). starting the container can be found [here](vm-setup--start-docker-container.md).
In order for the container to be able to find the certificate and key, they should be named In order for the container to be able to find the certificate and key, they should be named
@ -61,15 +61,15 @@ The following log files are kept in this directory:
used when running tests locally from inside the container, e.g. with the `aio-verify-setup` used when running tests locally from inside the container, e.g. with the `aio-verify-setup`
command. (See [here](overview--scripts-and-commands.md) for more info.) command. (See [here](overview--scripts-and-commands.md) for more info.)
- `upload-server-{prod,test,verify-setup}-*.log`: - `preview-server-{prod,test,verify-setup}-*.log`:
The logs produced by the Node.js upload-server while serving either: The logs produced by the Node.js preview-server while serving either:
- `-prod`: "Production" files (g.g during normal operation). - `-prod`: "Production" files (g.g during normal operation).
- `-test`: "Test" files (e.g. when a test instance is started with the `aio-upload-server-test` - `-test`: "Test" files (e.g. when a test instance is started with the `aio-preview-server-test`
command). command).
- `-verify-setup`: "Test" files, but while running `aio-verify-setup`. - `-verify-setup`: "Test" files, but while running `aio-verify-setup`.
(See [here](overview--scripts-and-commands.md) for more info on the commands mentioned above.) (See [here](overview--scripts-and-commands.md) for more info on the commands mentioned above.)
- `verify-setup.log`: - `verify-setup.log`:
The output of the `aio-verify-setup` command (e.g. Jasmine output), except for upload-server The output of the `aio-verify-setup` command (e.g. Jasmine output), except for preview-server
output which is logged to `upload-server-verify-setup-*.log` (see above). output which is logged to `preview-server-verify-setup-*.log` (see above).

View File

@ -12,14 +12,10 @@ Necessary secrets:
- Retrieving members of the trusted GitHub teams. - Retrieving members of the trusted GitHub teams.
- Posting comments with preview links on PRs. - Posting comments with preview links on PRs.
2. `PREVIEW_DEPLOYMENT_TOKEN` 2. `CIRCLE_CI_TOKEN`
- Used for: - Used for:
- Decoding the JWT tokens received with `/create-build` requests. - Retrieving build information.
- Downloading build artifacts.
**Note:**
`TEST_GITHUB_TOKEN` and `TEST_PREVIEW_DEPLOYMENT_TOKEN` can also be created similar to their
non-TEST counterparts and they will be loaded when running `aio-verify-setup`, but it is currently
not clear if/how they can be used in tests.
## Create secrets ## Create secrets
@ -28,25 +24,15 @@ not clear if/how they can be used in tests.
- Visit https://github.com/settings/tokens. - Visit https://github.com/settings/tokens.
- Generate new token with the `public_repo` scope. - Generate new token with the `public_repo` scope.
2. `PREVIEW_DEPLOYMENT_TOKEN` 2. `CIRCLE_CI_TOKEN`
- Just generate a hard-to-guess character sequence. - Visit https://circleci.com/gh/angular/angular/edit#api
- Add it to `.travis.yml` under `addons -> jwt -> secure`. - Create an API token with `Build Artifacts` scope
Can be added automatically with: `travis encrypt --add addons.jwt PREVIEW_DEPLOYMENT_TOKEN=<access-key>`
**Note:**
Due to [travis-ci/travis-ci#7223](https://github.com/travis-ci/travis-ci/issues/7223) it is not
currently possible to use the JWT addon (as described above) for anything other than the
`SAUCE_ACCESS_KEY` variable. You can get creative, though...
**WARNING**
TO avoid arbitrary uploads, make sure the `PREVIEW_DEPLOYMENT_TOKEN` is NOT printed in the Travis log.
## Save secrets on the VM ## Save secrets on the VM
- `sudo mkdir /aio-secrets` - `sudo mkdir /aio-secrets`
- `sudo touch /aio-secrets/GITHUB_TOKEN` - `sudo touch /aio-secrets/GITHUB_TOKEN`
- Insert `<github-token>` into `/aio-secrets/GITHUB_TOKEN`. - Insert `<github-token>` into `/aio-secrets/GITHUB_TOKEN`.
- `sudo touch /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN` - `sudo touch /aio-secrets/CIRCLE_CI_TOKEN`
- Insert `<access-token>` into `/aio-secrets/PREVIEW_DEPLOYMENT_TOKEN`. - Insert `<access-token>` into `/aio-secrets/CIRCLE_CI_TOKEN`.
- `sudo chmod 400 /aio-secrets/*` - `sudo chmod 400 /aio-secrets/*`
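For reference, the container's shell scripts read these files with `head -c -1 /aio-secrets/<NAME>`; a Node.js process could do the equivalent as in the hypothetical sketch below (the function name and fallback convention are illustrative only):

```ts
import { readFileSync } from 'node:fs';

// Hypothetical sketch mirroring the shell scripts' behaviour:
// read a secret from `/aio-secrets/<name>`, falling back to a placeholder value.
function readSecret(name: string): string {
  try {
    return readFileSync(`/aio-secrets/${name}`, 'utf8').trim();
  } catch {
    return `MISSING_${name}`;
  }
}

const githubToken = readSecret('GITHUB_TOKEN');       // GitHub API calls
const circleCiToken = readSecret('CIRCLE_CI_TOKEN');  // CircleCI API calls

if (githubToken.startsWith('MISSING_') || circleCiToken.startsWith('MISSING_')) {
  console.warn('One or more secrets are missing; API calls will fail.');
}
```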

View File

@ -13,14 +13,15 @@ sudo docker run \
--publish 80:80 \ --publish 80:80 \
--publish 443:443 \ --publish 443:443 \
--restart unless-stopped \ --restart unless-stopped \
[--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \
--volume <host-secrets-dir>:/aio-secrets:ro \ --volume <host-secrets-dir>:/aio-secrets:ro \
--volume <host-builds-dir>:/var/www/aio-builds \ --volume <host-builds-dir>:/var/www/aio-builds \
[--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \
[--volume <host-logs-dir>:/var/log/aio] \ [--volume <host-logs-dir>:/var/log/aio] \
[--volume <host-dockerbuild-dir>:/dockerbuild] \
<name>[:<tag>] <name>[:<tag>]
``` ```
Below is the same command with inline comments explaining each option. The aPI docs for `docker run` Below is the same command with inline comments explaining each option. The API docs for `docker run`
can be found [here](https://docs.docker.com/engine/reference/run/). can be found [here](https://docs.docker.com/engine/reference/run/).
``` ```
@ -30,7 +31,7 @@ sudo docker run \
--detach \ --detach \
# Use the local DNS server. # Use the local DNS server.
# (This is necessary for mapping internal URLs, e.g. for the Node.js upload-server.) # (This is necessary for mapping internal URLs, e.g. for the Node.js preview-server.)
--dns 127.0.0.1 \ --dns 127.0.0.1 \
# Use `<instance-name>` as an alias for the container.
@ -45,28 +46,32 @@ sudo docker run \
# (This ensures that the container will be automatically started on boot.) # (This ensures that the container will be automatically started on boot.)
--restart unless-stopped \ --restart unless-stopped \
# The directory the contains the SSL certificates.
# (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
# If not provided, the container will use self-signed certificates.
[--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \
# The directory that contains the secrets (e.g. GitHub token, JWT secret, etc). # The directory that contains the secrets (e.g. GitHub token, JWT secret, etc).
# (See [here](vm-setup--set-up-secrets.md) for more info.) # (See [here](vm-setup--set-up-secrets.md) for more info.)
--volume <host-secrets-dir>:/aio-secrets:ro \ --volume <host-secrets-dir>:/aio-secrets:ro \
# The uploaded build artifacts will be stored in and served from this directory. # The build artifacts and hosted previews will be stored in and served from this directory.
# (If you are using a persistent disk - as described [here](vm-setup--attach-persistent-disk.md) - # (If you are using a persistent disk - as described [here](vm-setup--attach-persistent-disk.md) -
# this will be a directory inside the disk.) # this will be a directory inside the disk.)
--volume <host-builds-dir>:/var/www/aio-builds \ --volume <host-builds-dir>:/var/www/aio-builds \
# The directory that contains the SSL certificates.
# (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
# If not provided, the container will use self-signed certificates.
[--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \
# The directory where the logs are being kept. # The directory where the logs are being kept.
# (See [here](vm-setup--create-host-dirs-and-files.md) for more info.) # (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
# If not provided, the logs will be kept inside the container, which means they will be lost # If not provided, the logs will be kept inside the container, which means they will be lost
# whenever a new container is created. # whenever a new container is created.
[--volume <host-logs-dir>:/var/log/aio] \ [--volume <host-logs-dir>:/var/log/aio] \
# This directory allows you to share the source scripts between the host and the container when
# debugging. (See [here](misc--debug-docker-container.md) for how to set this up.)
[--volume <host-dockerbuild-dir>:/dockerbuild] \
# The name of the docker image to use (and an optional tag; defaults to `latest`). # The name of the docker image to use (and an optional tag; defaults to `latest`).
# (See [here](vm-setup--create-docker-image.md) for instructions on how to create the iamge.) # (See [here](vm-setup--create-docker-image.md) for instructions on how to create the image.)
<name>[:<tag>] <name>[:<tag>]
``` ```
@ -74,7 +79,8 @@ sudo docker run \
## Example ## Example
The following command would start a docker container based on the previously created `foobar-builds` The following command would start a docker container based on the previously created `foobar-builds`
docker image, alias it as 'foobar-builds-1' and map predefined directories on the host VM to be used docker image, alias it as 'foobar-builds-1' and map predefined directories on the host VM to be used
by the container for accessing secrets and SSL certificates and keeping the build artifacts and logs. by the container for accessing secrets and SSL certificates and keeping the build artifacts and logs;
and will map the source scripts from the host to the container.
``` ```
sudo docker run \ sudo docker run \
@ -84,9 +90,10 @@ sudo docker run \
--publish 80:80 \ --publish 80:80 \
--publish 443:443 \ --publish 443:443 \
--restart unless-stopped \ --restart unless-stopped \
--volume /etc/ssl/localcerts:/etc/ssl/localcerts:ro \
--volume /foobar-secrets:/aio-secrets:ro \ --volume /foobar-secrets:/aio-secrets:ro \
--volume /mnt/disks/foobar-builds:/var/www/aio-builds \ --volume /mnt/disks/foobar-builds:/var/www/aio-builds \
--volume /etc/ssl/localcerts:/etc/ssl/localcerts:ro \
--volume /foobar-logs:/var/log/aio \ --volume /foobar-logs:/var/log/aio \
--volume ~/angular/aio/aio-builds-setup/dockerbuild:/dockerbuild \
foobar-builds foobar-builds
``` ```

View File

@ -33,7 +33,6 @@
"src/assets", "src/assets",
"src/generated", "src/generated",
"src/app/search/search-worker.js", "src/app/search/search-worker.js",
"src/favicon.ico",
"src/pwa-manifest.json", "src/pwa-manifest.json",
"src/google385281288605d160.html", "src/google385281288605d160.html",
{ {
@ -62,7 +61,8 @@
"src": "src/environments/environment.ts", "src": "src/environments/environment.ts",
"replaceWith": "src/environments/environment.next.ts" "replaceWith": "src/environments/environment.next.ts"
} }
] ],
"serviceWorker": true
}, },
"stable": { "stable": {
"fileReplacements": [ "fileReplacements": [
@ -70,7 +70,8 @@
"src": "src/environments/environment.ts", "src": "src/environments/environment.ts",
"replaceWith": "src/environments/environment.stable.ts" "replaceWith": "src/environments/environment.stable.ts"
} }
] ],
"serviceWorker": true
}, },
"archive": { "archive": {
"fileReplacements": [ "fileReplacements": [
@ -78,7 +79,8 @@
"src": "src/environments/environment.ts", "src": "src/environments/environment.ts",
"replaceWith": "src/environments/environment.archive.ts" "replaceWith": "src/environments/environment.archive.ts"
} }
] ],
"serviceWorker": true
} }
} }
}, },
@ -123,7 +125,6 @@
"src/assets", "src/assets",
"src/generated", "src/generated",
"src/app/search/search-worker.js", "src/app/search/search-worker.js",
"src/favicon.ico",
"src/pwa-manifest.json", "src/pwa-manifest.json",
"src/google385281288605d160.html", "src/google385281288605d160.html",
{ {

View File

@ -36,3 +36,7 @@
<div class="di-component"> <div class="di-component">
<app-parent-finder></app-parent-finder> <app-parent-finder></app-parent-finder>
</div> </div>
<div class="di-component">
<app-storage></app-storage>
</div>

View File

@ -9,9 +9,6 @@ import { UserService } from './user.service';
@Component({ @Component({
selector: 'app-root', selector: 'app-root',
templateUrl: './app.component.html', templateUrl: './app.component.html',
// #docregion providers
providers: [ LoggerService, UserContextService, UserService ]
// #enddocregion providers
}) })
export class AppComponent { export class AppComponent {
// #enddocregion import-services // #enddocregion import-services
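Removing the component-level `providers` array works because the services involved are switched to `@Injectable({ providedIn: 'root' })` further down in this diff. A minimal, standalone sketch of the pattern (the class names here are generic placeholders, not part of the sample):

```ts
import { Component, Injectable } from '@angular/core';

// With `providedIn: 'root'`, the service registers itself with the root
// injector and becomes tree-shakable; no `providers` entry is needed.
@Injectable({
  providedIn: 'root'
})
export class GreetingService {
  greet(name: string) { return `Hello, ${name}!`; }
}

@Component({
  selector: 'app-greeting',
  template: '<p>{{ message }}</p>'
  // Note: no `providers: [GreetingService]` required here.
})
export class GreetingComponent {
  message = '';
  constructor(greetingService: GreetingService) {
    this.message = greetingService.greet('Angular');
  }
}
```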

View File

@ -31,6 +31,7 @@ import { ParentFinderComponent,
BarryComponent, BarryComponent,
BethComponent, BethComponent,
BobComponent } from './parent-finder.component'; BobComponent } from './parent-finder.component';
import { StorageComponent } from './storage.component';
const declarations = [ const declarations = [
AppComponent, AppComponent,
@ -63,6 +64,7 @@ const c_components = [
a_components, a_components,
b_components, b_components,
c_components, c_components,
StorageComponent,
], ],
bootstrap: [ AppComponent ], bootstrap: [ AppComponent ],
// #docregion providers // #docregion providers

View File

@ -5,7 +5,9 @@ import { Injectable } from '@angular/core';
import { LoggerService } from './logger.service'; import { LoggerService } from './logger.service';
// #docregion date-logger-service // #docregion date-logger-service
@Injectable() @Injectable({
providedIn: 'root'
})
// #docregion date-logger-service-signature // #docregion date-logger-service-signature
export class DateLoggerService extends LoggerService export class DateLoggerService extends LoggerService
// #enddocregion date-logger-service-signature // #enddocregion date-logger-service-signature

View File

@ -2,7 +2,9 @@
import { Injectable } from '@angular/core'; import { Injectable } from '@angular/core';
import { Hero } from './hero'; import { Hero } from './hero';
@Injectable() @Injectable({
providedIn: 'root'
})
export class HeroService { export class HeroService {
// TODO: move to database // TODO: move to database

View File

@ -1,7 +1,9 @@
// #docregion // #docregion
import { Injectable } from '@angular/core'; import { Injectable } from '@angular/core';
@Injectable() @Injectable({
providedIn: 'root'
})
export class LoggerService { export class LoggerService {
logs: string[] = []; logs: string[] = [];

View File

@ -0,0 +1,38 @@
// #docregion
import { Component, OnInit, Self, SkipSelf } from '@angular/core';
import { BROWSER_STORAGE, BrowserStorageService } from './storage.service';
@Component({
selector: 'app-storage',
template: `
Open the inspector to see the local/session storage keys:
<h3>Session Storage</h3>
<button (click)="setSession()">Set Session Storage</button>
<h3>Local Storage</h3>
<button (click)="setLocal()">Set Local Storage</button>
`,
providers: [
BrowserStorageService,
{ provide: BROWSER_STORAGE, useFactory: () => sessionStorage }
]
})
export class StorageComponent implements OnInit {
constructor(
@Self() private sessionStorageService: BrowserStorageService,
@SkipSelf() private localStorageService: BrowserStorageService,
) { }
ngOnInit() {
}
setSession() {
this.sessionStorageService.set('hero', 'Mr. Nice - Session');
}
setLocal() {
this.localStorageService.set('hero', 'Mr. Nice - Local');
}
}

View File

@ -0,0 +1,34 @@
// #docregion
import { Inject, Injectable, InjectionToken } from '@angular/core';
// #docregion storage-token
export const BROWSER_STORAGE = new InjectionToken<Storage>('Browser Storage', {
providedIn: 'root',
factory: () => localStorage
});
// #enddocregion storage-token
// #docregion inject-storage-token
@Injectable({
providedIn: 'root'
})
export class BrowserStorageService {
constructor(@Inject(BROWSER_STORAGE) public storage: Storage) {}
get(key: string) {
return this.storage.getItem(key);
}
set(key: string, value: string) {
this.storage.setItem(key, value);
}
remove(key: string) {
this.storage.removeItem(key);
}
clear() {
this.storage.clear();
}
}
// #enddocregion inject-storage-token
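In the two new files above, `BROWSER_STORAGE` defaults to `localStorage` via its tree-shakable factory, while `StorageComponent` re-provides the token with a `sessionStorage` factory; `@Self()` therefore resolves the component-level provider (session storage) and `@SkipSelf()` the root one (local storage). A minimal sketch of the same token-override pattern, using placeholder names rather than the sample's:

```ts
import { Component, Inject, InjectionToken } from '@angular/core';

// Root-provided token with a default factory (tree-shakable).
export const GREETING = new InjectionToken<string>('Greeting', {
  providedIn: 'root',
  factory: () => 'Hello from the root injector',
});

@Component({
  selector: 'app-greeting-demo',
  template: '{{ greeting }}',
  // Overrides the root-provided value for this component's injector only.
  providers: [{ provide: GREETING, useFactory: () => 'Hello from the component injector' }],
})
export class GreetingDemoComponent {
  // Renders 'Hello from the component injector'; a sibling component without
  // the `providers` override would receive the root factory's value instead.
  constructor(@Inject(GREETING) public greeting: string) {}
}
```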

View File

@ -6,7 +6,9 @@ import { LoggerService } from './logger.service';
import { UserService } from './user.service'; import { UserService } from './user.service';
// #docregion injectables, injectable // #docregion injectables, injectable
@Injectable() @Injectable({
providedIn: 'root'
})
export class UserContextService { export class UserContextService {
// #enddocregion injectables, injectable // #enddocregion injectables, injectable
name: string; name: string;

View File

@ -1,7 +1,9 @@
// #docregion // #docregion
import { Injectable } from '@angular/core'; import { Injectable } from '@angular/core';
@Injectable() @Injectable({
providedIn: 'root'
})
export class UserService { export class UserService {
getUserById(userId: number): any { getUserById(userId: number): any {

Some files were not shown because too many files have changed in this diff.