Compare commits
18 Commits
10.0.0-rc. ... 9.1.0-rc.1

SHA1
5357e643b3
f71d132f7c
ba3edda230
0767d37c07
8ba24578bc
133a97ad67
4e67a3ab3f
377f0010fc
6e09129e4c
d80e51a6b1
feb66b00da
cb19eac105
6e0564ade6
05eeb7d279
2ce5fa3cce
e140cdcb34
14b2db1d43
2afc7e982e
.bazelrc (7)

@@ -33,11 +33,6 @@ build --incompatible_strict_action_env
run --incompatible_strict_action_env
test --incompatible_strict_action_env

# Do not build runfile trees by default. If an execution strategy relies on runfile
# symlink teee, the tree is created on-demand. See: https://github.com/bazelbuild/bazel/issues/6627
# and https://github.com/bazelbuild/bazel/commit/03246077f948f2790a83520e7dccc2625650e6df
build --nobuild_runfile_links

###############################
# Release support #
# Turn on these settings with #
@@ -47,7 +42,7 @@ build --nobuild_runfile_links
# Releases should always be stamped with version control info
# This command assumes node on the path and is a workaround for
# https://github.com/bazelbuild/bazel/issues/4802
build:release --workspace_status_command="yarn -s ng-dev release build-env-stamp"
build:release --workspace_status_command="node ./tools/bazel_stamp_vars.js"
build:release --stamp

###############################
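Both workspace_status_command values above point at a script (ng-dev release build-env-stamp on one side, tools/bazel_stamp_vars.js on the other) whose output is not part of this diff. As a rough illustration only, such a script follows Bazel's documented contract of printing "KEY value" pairs to stdout, with STABLE_-prefixed keys feeding stable-status.txt; the sketch below is hypothetical and does not reproduce either repository script.

// Hypothetical workspace status script (not the repository's own); it only
// illustrates the stdout contract that `--workspace_status_command` expects.
const { execSync } = require('child_process');

const git = (args) => execSync(`git ${args}`, { encoding: 'utf8' }).trim();

// STABLE_* keys are written to stable-status.txt and re-stamp outputs when they
// change; other keys are volatile and do not invalidate actions.
console.log(`STABLE_BUILD_SCM_HASH ${git('rev-parse HEAD')}`);
console.log(`STABLE_BUILD_SCM_BRANCH ${git('rev-parse --abbrev-ref HEAD')}`);
console.log(`BUILD_TIMESTAMP ${Math.floor(Date.now() / 1000)}`);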
@@ -1,3 +1,3 @@
3.2.0
2.1.1
# [NB: this comment has to be after the first line, see https://github.com/bazelbuild/bazelisk/issues/117]
# When updating the Bazel version you also need to update the RBE toolchains version in package.bzl
@@ -12,8 +12,8 @@ We use this as a symmetric AES encryption key to encrypt tokens like
a GitHub token that enables publishing snapshots.

To create the github_token file, we take this approach:
- Find the angular-builds:token in the internal pw database
- Find the angular-builds:token in http://valentine
- Go inside the CircleCI default docker image so you use the same version of openssl as we will at runtime: `docker run --rm -it circleci/node:10.12`
- echo "https://[token]:@github.com" > credentials
- openssl aes-256-cbc -e -in credentials -out .circleci/github_token -k $KEY
- If needed, base64-encode the result so you can copy-paste it out of docker: `base64 github_token`
- If needed, base64-encode the result so you can copy-paste it out of docker: `base64 github_token`
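The encrypt command above pairs with the decrypt step that appears later in the CircleCI config (openssl aes-256-cbc -d ... -md md5). Purely as a hedged illustration, the Node sketch below runs the same round trip locally; it pins -md md5 on both sides, whereas the documented CI flow relies on the circleci/node:10.12 image's openssl default for encryption. The local file names and $KEY handling are assumptions for the example.

// Illustrative round trip of the credentials encryption used on CI; assumes
// `openssl` is on PATH and KEY holds the same passphrase used by CircleCI.
const { execSync } = require('child_process');
const fs = require('fs');

const key = process.env.KEY;
fs.writeFileSync('credentials', 'https://[token]:@github.com\n');
execSync(`openssl aes-256-cbc -e -md md5 -in credentials -out github_token -k "${key}"`);
execSync(`openssl aes-256-cbc -d -md md5 -in github_token -out decrypted -k "${key}"`);
console.log(fs.readFileSync('decrypted', 'utf8')); // should echo the credentials line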
@@ -14,17 +14,8 @@ build --repository_cache=/home/circleci/bazel_repository_cache

# Bazel doesn't calculate the memory ceiling correctly when running under Docker.
# Limit Bazel to consuming resources that fit in CircleCI "xlarge" class
# https://circleci.com/docs/2.0/configuration-reference/#resource_class
build --local_cpu_resources=8
build --local_ram_resources=14336
build --local_resources=14336,8.0,1.0

# All build executed remotely should be done using our RBE configuration.
build:remote --google_default_credentials

# Upload to GCP's Build Status viewer to allow for us to have better viewing of execution/build
# logs. This is only done on CI as the BES (GCP's Build Status viewer) API requires credentials
# from service accounts, rather than end user accounts.
build:remote --bes_backend=buildeventservice.googleapis.com
build:remote --bes_timeout=30s
build:remote --bes_results_url="https://source.cloud.google.com/results/invocations/"

build --config=remote
@@ -10,10 +10,6 @@ try-import %workspace%/.circleci/bazel.common.rc

# speeding up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=C:/Users/circleci/bazel_repository_cache

# Manually set the local resources used in windows CI runs
build --local_ram_resources=13500
build --local_cpu_resources=4

# All windows jobs run on master and should use http caching
build --remote_http_cache=https://storage.googleapis.com/angular-team-cache
build --remote_accept_cached=true
@@ -22,18 +22,18 @@ version: 2.1

# **NOTE 1 **: If you change the cache key prefix, also sync the cache_key_fallback to match.
# **NOTE 2 **: Keep the static part of the cache key as prefix to enable correct fallbacks.
# See https://circleci.com/docs/2.0/caching/#restoring-cache for how prefixes work in CircleCI.
var_3: &cache_key v7-angular-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_3: &cache_key v4-angular-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
# We invalidate the cache if the Bazel version changes because otherwise the `bazelisk` cache
# folder will contain all previously used versions and ultimately cause the cache restoring to
# be slower due to its growing size.
var_4: &cache_key_fallback v7-angular-node-12-{{ checksum ".bazelversion" }}
var_3_win: &cache_key_win v7-angular-win-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4_win: &cache_key_win_fallback v7-angular-win-node-12-{{ checksum ".bazelversion" }}
var_4: &cache_key_fallback v4-angular-node-12-{{ checksum ".bazelversion" }}
var_3_win: &cache_key_win v5-angular-win-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4_win: &cache_key_win_fallback v5-angular-win-node-12-{{ checksum ".bazelversion" }}

# Cache key for the `components-repo-unit-tests` job. **Note** when updating the SHA in the
# cache keys also update the SHA for the "COMPONENTS_REPO_COMMIT" environment variable.
var_5: &components_repo_unit_tests_cache_key v7-angular-components-189d98e8b01b33974328255f085de04251d61567
var_6: &components_repo_unit_tests_cache_key_fallback v7-angular-components-
var_5: &components_repo_unit_tests_cache_key v5-angular-components-598db096e668aa7e9debd56eedfd127b7a55e371
var_6: &components_repo_unit_tests_cache_key_fallback v5-angular-components-

# Workspace initially persisted by the `setup` job, and then enhanced by `build-npm-packages` and
# `build-ivy-npm-packages`.
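The cache keys above concatenate a version prefix with checksums of a handful of lockfiles and build files. Purely as an illustration (the helper below is hypothetical, and CircleCI's own checksum encoding differs in detail), a comparable composite key could be computed locally like this:

// Hypothetical local approximation of a CircleCI-style cache key; assumes the
// listed files exist in the current working directory (the repository root).
const crypto = require('crypto');
const fs = require('fs');

const checksum = (file) =>
    crypto.createHash('sha256').update(fs.readFileSync(file)).digest('hex').slice(0, 12);

const cacheKey = (prefix, files) => [prefix, ...files.map(checksum)].join('-');

// Files referenced by the `v7-angular-node-12` key shown above.
console.log(cacheKey('v7-angular-node-12', [
  '.bazelversion',
  'yarn.lock',
  'WORKSPACE',
  'packages/bazel/package.bzl',
  'aio/yarn.lock',
]));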
@@ -67,6 +67,9 @@ var_10: &only_on_master

# **NOTE 1**: Pin to exact images using an ID (SHA). See https://circleci.com/docs/2.0/circleci-images/#using-a-docker-image-id-to-pin-an-image-to-a-fixed-version.
# (Using the tag in not necessary when pinning by ID, but include it anyway for documentation purposes.)
# **NOTE 2**: If you change the version of the docker images, also change the `cache_key` suffix.
# **NOTE 3**: If you change the version of the `*-browsers` docker image, make sure the
# `--versions.chrome` arg in `integration/bazel-schematics/test.sh` specifies a
# ChromeDriver version that is compatible with the Chrome version in the image.
executors:
default-executor:
parameters:
@@ -117,7 +120,7 @@ commands:
sudo apt-get update
# Install GTK+ graphical user interface (libgtk-3-0), advanced linux sound architecture (libasound2)
# and network security service libraries (libnss3) & X11 Screen Saver extension library (libssx1)
# which are dependencies of chrome & needed for karma & protractor headless chrome tests.
# which are dependendies of chrome & needed for karma & protractor headless chrome tests.
# This is a very small install which takes around 7s in comparing to using the full
# circleci/node:x.x.x-browsers image.
sudo apt-get -y install libgtk-3-0 libasound2 libnss3 libxss1
@@ -160,7 +163,7 @@ commands:
description: Sets up a domain that resolves to the local host.
steps:
- run:
name: Preparing environment for running tests on Sauce Labs.
name: Preparing environment for running tests on Saucelabs.
command: |
# For SauceLabs jobs, we set up a domain which resolves to the machine which launched
# the tunnel. We do this because devices are sometimes not able to properly resolve
@@ -172,13 +175,13 @@ commands:
setSecretVar SAUCE_ACCESS_KEY $(echo $SAUCE_ACCESS_KEY | rev)
- run:
# Sets up a local domain in the machine's host file that resolves to the local
# host. This domain is helpful in Sauce Labs tests where devices are not able to
# host. This domain is helpful in Saucelabs tests where devices are not able to
# properly resolve `localhost` or `127.0.0.1` through the sauce-connect tunnel.
name: Setting up alias domain for local host.
command: echo "127.0.0.1 $SAUCE_LOCALHOST_ALIAS_DOMAIN" | sudo tee -a /etc/hosts

# Normally this would be an individual job instead of a command.
# But startup and setup time for each individual windows job are high enough to discourage
# But startup and setup time for each invidual windows job are high enough to discourage
# many small jobs, so instead we use a command for setup unless the gain becomes significant.
setup_win:
description: Setup windows node environment
@@ -223,7 +226,6 @@ jobs:
executor: default-executor
steps:
- checkout
- init_environment
- run:
name: Rebase PR on target branch
# After checkout, rebase on top of target branch.
@@ -233,7 +235,7 @@ jobs:
git config user.name "angular-ci"
git config user.email "angular-ci"
# Rebase PR on top of target branch.
node tools/rebase-pr.js
node tools/rebase-pr.js angular/angular ${CIRCLE_PR_NUMBER}
else
echo "This build is not over a PR, nothing to do."
fi
@@ -242,6 +244,7 @@ jobs:
keys:
- *cache_key
- *cache_key_fallback
- init_environment
- run:
name: Running Yarn install
command: yarn install --frozen-lockfile --non-interactive
@@ -269,11 +272,15 @@ jobs:
- custom_attach_workspace
- init_environment

- run: yarn -s tslint
- run: yarn -s ng-dev format changed $CI_GIT_BASE_REVISION --check
- run: yarn -s ts-circular-deps:check
- run: yarn -s ng-dev pullapprove verify
- run: yarn -s ng-dev commit-message validate-range --range $CI_COMMIT_RANGE
- run: 'yarn bazel:format -mode=check ||
(echo "BUILD files not formatted. Please run ''yarn bazel:format''" ; exit 1)'
# Run the skylark linter to check our Bazel rules
- run: 'yarn bazel:lint ||
(echo -e "\n.bzl files have lint errors. Please run ''yarn bazel:lint-fix''"; exit 1)'

- run: yarn lint
- run: yarn ts-circular-deps:check
- run: node tools/pullapprove/verify.js

test:
executor:
@@ -381,6 +388,10 @@ jobs:
- custom_attach_workspace
- init_environment
- install_chrome_libs
# Compile dependencies to ivy
# Running `ngcc` here (instead of implicitly via `ng build`) allows us to take advantage of
# the parallel, async mode speed-up (~20-25s on CI).
- run: yarn --cwd aio ngcc --properties es2015
# Build aio
- run: yarn --cwd aio build --progress=false
# Lint the code
@@ -443,7 +454,7 @@ jobs:

test_docs_examples:
parameters:
viewengine:
ivy:
type: boolean
default: false
executor:
@@ -459,7 +470,7 @@ jobs:
# Run examples tests. The "CIRCLE_NODE_INDEX" will be set if "parallelism" is enabled.
# Since the parallelism is set to "5", there will be five parallel CircleCI containers.
# with either "0", "1", etc as node index. This can be passed to the "--shard" argument.
- run: yarn --cwd aio example-e2e --setup --local <<# parameters.viewengine >>--viewengine<</ parameters.viewengine >> --cliSpecsConcurrency=5 --shard=${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL} --retry 2
- run: yarn --cwd aio example-e2e --setup --local <<# parameters.ivy >>--ivy<</ parameters.ivy >> --cliSpecsConcurrency=5 --shard=${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL} --retry 2

# This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
aio_preview:
@@ -596,8 +607,8 @@ jobs:
- run:
name: Decrypt github credentials
# We need ensure that the same default digest is used for encoding and decoding with
# OpenSSL. OpenSSL versions might have different default digests which can cause
# decryption failures based on the installed OpenSSL version. https://stackoverflow.com/a/39641378/4317734
# openssl. Openssl versions might have different default digests which can cause
# decryption failures based on the installed openssl version. https://stackoverflow.com/a/39641378/4317734
command: 'openssl aes-256-cbc -d -in .circleci/github_token -md md5 -k "${KEY}" -out ~/.git_credentials'
- run: ./scripts/ci/publish-build-artifacts.sh

@@ -811,8 +822,8 @@ workflows:
requires:
- build-npm-packages
- test_docs_examples:
name: test_docs_examples_viewengine
viewengine: true
name: test_docs_examples_ivy
ivy: true
requires:
- build-npm-packages
- aio_preview:
@@ -839,7 +850,7 @@ workflows:
- test_aio_local
- test_aio_local_viewengine
- test_docs_examples
- test_docs_examples_viewengine
- test_docs_examples_ivy
# Get the artifacts to publish from the build-packages-dist job
# since the publishing script expects the legacy outputs layout.
- build-npm-packages
@@ -3,7 +3,6 @@
# Variables
readonly projectDir=$(realpath "$(dirname ${BASH_SOURCE[0]})/..")
readonly envHelpersPath="$projectDir/.circleci/env-helpers.inc.sh";
readonly bashEnvCachePath="$projectDir/.circleci/bash_env_cache";

# Load helpers and make them available everywhere (through `$BASH_ENV`).
source $envHelpersPath;
@@ -15,7 +14,6 @@ echo "source $envHelpersPath;" >> $BASH_ENV;
####################################################################################################
# See https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables for more info.
####################################################################################################
setPublicVar CI "$CI"
setPublicVar PROJECT_ROOT "$projectDir";
setPublicVar CI_AIO_MIN_PWA_SCORE "95";
# This is the branch being built; e.g. `pull/12345` for PR builds.
@@ -24,14 +22,10 @@ setPublicVar CI_BUILD_URL "$CIRCLE_BUILD_URL";
setPublicVar CI_COMMIT "$CIRCLE_SHA1";
# `CI_COMMIT_RANGE` is only used on push builds (a.k.a. non-PR, non-scheduled builds and rerun
# workflows of such builds).
setPublicVar CI_GIT_BASE_REVISION "${CIRCLE_GIT_BASE_REVISION}";
setPublicVar CI_GIT_REVISION "${CIRCLE_GIT_REVISION}";
setPublicVar CI_COMMIT_RANGE "$CIRCLE_GIT_BASE_REVISION..$CIRCLE_GIT_REVISION";
setPublicVar CI_PULL_REQUEST "${CIRCLE_PR_NUMBER:-false}";
setPublicVar CI_REPO_NAME "$CIRCLE_PROJECT_REPONAME";
setPublicVar CI_REPO_OWNER "$CIRCLE_PROJECT_USERNAME";
setPublicVar CI_PR_REPONAME "$CIRCLE_PR_REPONAME";
setPublicVar CI_PR_USERNAME "$CIRCLE_PR_USERNAME";


####################################################################################################
@@ -74,7 +68,7 @@ setPublicVar COMPONENTS_REPO_TMP_DIR "/tmp/angular-components-repo"
setPublicVar COMPONENTS_REPO_URL "https://github.com/angular/components.git"
setPublicVar COMPONENTS_REPO_BRANCH "master"
# **NOTE**: When updating the commit SHA, also update the cache key in the CircleCI `config.yml`.
setPublicVar COMPONENTS_REPO_COMMIT "189d98e8b01b33974328255f085de04251d61567"
setPublicVar COMPONENTS_REPO_COMMIT "598db096e668aa7e9debd56eedfd127b7a55e371"


####################################################################################################
@@ -60,15 +60,14 @@ if (require.resolve === module) {

// Helpers
function _main(args) {
triggerWebhook(...args)
.then(
({statusCode, responseText}) => (200 <= statusCode && statusCode < 400) ?
console.log(`Status: ${statusCode}\n${responseText}`) :
Promise.reject(new Error(`Request failed (status: ${statusCode}): ${responseText}`)))
.catch(err => {
console.error(err);
process.exit(1);
});
triggerWebhook(...args).
then(({statusCode, responseText}) => (200 <= statusCode && statusCode < 400) ?
console.log(`Status: ${statusCode}\n${responseText}`) :
Promise.reject(new Error(`Request failed (status: ${statusCode}): ${responseText}`))).
catch(err => {
console.error(err);
process.exit(1);
});
}

function postJson(url, data) {
@@ -78,12 +77,15 @@ function postJson(url, data) {
const statusCode = res.statusCode || -1;
let responseText = '';

res.on('error', reject)
.on('data', d => responseText += d)
.on('end', () => resolve({statusCode, responseText}));
res.
on('error', reject).
on('data', d => responseText += d).
on('end', () => resolve({statusCode, responseText}));
};

request(url, opts, onResponse).on('error', reject).end(JSON.stringify(data));
request(url, opts, onResponse).
on('error', reject).
end(JSON.stringify(data));
});
}
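The hunks above only reflow the promise chains in the webhook helper; the behaviour is unchanged. For reference, a self-contained sketch of the same postJson pattern (with a placeholder endpoint and payload that are not taken from the diff) looks like this:

// Minimal sketch of the promise-based POST helper used by the webhook trigger
// script above; URL and payload below are placeholders for illustration only.
const { request } = require('https');

function postJson(url, data) {
  return new Promise((resolve, reject) => {
    const opts = { method: 'POST', headers: { 'Content-Type': 'application/json' } };
    const onResponse = res => {
      const statusCode = res.statusCode || -1;
      let responseText = '';
      res.on('error', reject)
          .on('data', d => responseText += d)
          .on('end', () => resolve({ statusCode, responseText }));
    };
    request(url, opts, onResponse).on('error', reject).end(JSON.stringify(data));
  });
}

// Example usage: a 2xx/3xx status counts as success, mirroring the
// `200 <= statusCode && statusCode < 400` check in the script above.
postJson('https://example.com/hook', { ping: true })
    .then(({ statusCode }) => console.log(`Status: ${statusCode}`))
    .catch(err => { console.error(err); process.exit(1); });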
.dev-infra.json (47, Normal file)

@@ -0,0 +1,47 @@
{
  "commitMessage": {
    "maxLength": 120,
    "minBodyLength": 0,
    "types": [
      "build",
      "ci",
      "docs",
      "feat",
      "fix",
      "perf",
      "refactor",
      "release",
      "style",
      "test"
    ],
    "scopes": [
      "animations",
      "bazel",
      "benchpress",
      "changelog",
      "common",
      "compiler",
      "compiler-cli",
      "core",
      "dev-infra",
      "docs-infra",
      "elements",
      "forms",
      "http",
      "language-service",
      "localize",
      "ngcc",
      "packaging",
      "platform-browser",
      "platform-browser-dynamic",
      "platform-server",
      "platform-webworker",
      "platform-webworker-dynamic",
      "router",
      "service-worker",
      "upgrade",
      "ve",
      "zone.js"
    ]
  }
}
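The commitMessage block above feeds ng-dev's commit message validation. As a hypothetical illustration only (this is not the ng-dev implementation), a header check against those settings could look like the following, assuming the conventional "<type>(<scope>): <subject>" header format used by the repository:

// Hypothetical checker showing how the commitMessage settings above could be
// applied to a commit header; the scopes list is abbreviated for brevity.
const config = {
  maxLength: 120,
  types: ['build', 'ci', 'docs', 'feat', 'fix', 'perf', 'refactor', 'release', 'style', 'test'],
  scopes: ['core', 'compiler', 'docs-infra', 'router' /* ... full list as above */],
};

function validateHeader(header) {
  const errors = [];
  if (header.length > config.maxLength) errors.push(`header longer than ${config.maxLength} chars`);
  const match = /^(\w+)(?:\(([^)]+)\))?: .+$/.exec(header);
  if (!match) return ['header does not match "<type>(<scope>): <subject>"'];
  const [, type, scope] = match;
  if (!config.types.includes(type)) errors.push(`unknown type "${type}"`);
  if (scope && !config.scopes.includes(scope)) errors.push(`unknown scope "${scope}"`);
  return errors;
}

console.log(validateHeader('fix(core): handle undefined inputs'));  // []
console.log(validateHeader('chore(core): tidy up'));                // ['unknown type "chore"']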
.github/ISSUE_TEMPLATE/1-bug-report.md (4, vendored)

@@ -32,13 +32,13 @@ Existing issues often contain information about workarounds, resolution, or prog

## 🔬 Minimal Reproduction
<!--
Please create and share minimal reproduction of the issue starting with this template: https://stackblitz.com/fork/angular-ivy
Please create and share minimal reproduction of the issue starting with this template: https://stackblitz.com/fork/angular-issue-repro2
-->
<!-- ✍️--> https://stackblitz.com/...

<!--
If StackBlitz is not suitable for reproduction of your issue, please create a minimal GitHub repository with the reproduction of the issue.
A good way to make a minimal reproduction is to create a new app via `ng new repro-app` and add the minimum possible code to show the problem.
A good way to make a minimal reproduction is to create a new app via `ng new repro-app` and add the minimum possible code to show the problem.
Share the link to the repo below along with step-by-step instructions to reproduce the problem, as well as expected and actual behavior.

Issues that don't have enough info and can't be reproduced will be closed.
@@ -1,119 +0,0 @@
import {MergeConfig} from '../dev-infra/pr/merge/config';

// The configuration for `ng-dev commit-message` commands.
const commitMessage = {
  'maxLength': 120,
  'minBodyLength': 100,
  'types': [
    'build',
    'ci',
    'docs',
    'feat',
    'fix',
    'perf',
    'refactor',
    'release',
    'style',
    'test',
  ],
  'scopes': [
    'animations',
    'bazel',
    'benchpress',
    'changelog',
    'common',
    'compiler',
    'compiler-cli',
    'core',
    'dev-infra',
    'docs-infra',
    'elements',
    'forms',
    'http',
    'language-service',
    'localize',
    'migrations',
    'ngcc',
    'packaging',
    'platform-browser',
    'platform-browser-dynamic',
    'platform-server',
    'platform-webworker',
    'platform-webworker-dynamic',
    'router',
    'service-worker',
    'upgrade',
    've',
    'zone.js',
  ]
};

// The configuration for `ng-dev format` commands.
const format = {
  'clang-format': {
    'matchers': [
      '**/*.{js,ts}',
      // TODO: burn down format failures and remove aio and integration exceptions.
      '!aio/**',
      '!integration/**',
      // TODO: remove this exclusion as part of IE deprecation.
      '!shims_for_IE.js',
      // Both third_party and .yarn are directories containing copied code which should
      // not be modified.
      '!third_party/**',
      '!.yarn/**',
      // Do not format d.ts files as they are generated
      '!**/*.d.ts',
    ]
  },
  'buildifier': true
};

/** Github metadata information for `ng-dev` commands. */
const github = {
  owner: 'angular',
  name: 'angular',
};

// Configuration for the `ng-dev pr merge` command. The command can be used
// for merging upstream pull requests into branches based on a PR target label.
const merge = () => {
  // TODO: resume dynamically determining patch branch
  const patch = '10.0.x';
  const config: MergeConfig = {
    githubApiMerge: false,
    claSignedLabel: 'cla: yes',
    mergeReadyLabel: /^PR action: merge(-assistance)?/,
    commitMessageFixupLabel: 'commit message fixup',
    labels: [
      {
        pattern: 'PR target: master-only',
        branches: ['master'],
      },
      {
        pattern: 'PR target: patch-only',
        branches: [patch],
      },
      {
        pattern: 'PR target: master & patch',
        branches: ['master', patch],
      },
    ],
    requiredBaseCommits: {
      // PRs that target either `master` or the patch branch, need to be rebased
      // on top of the latest commit message validation fix.
      // These SHAs are the commits that update the required license text in the header.
      'master': '5aeb9a4124922d8ac08eb73b8f322905a32b0b3a',
      [patch]: '27b95ba64a5d99757f4042073fd1860e20e3ed24'
    },
  };
  return config;
};

// Export function to build ng-dev configuration object.
module.exports = {
  commitMessage,
  format,
  github,
  merge,
};
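The removed merge configuration above maps PR target labels to branch lists and mixes plain strings with a RegExp (mergeReadyLabel). A small illustrative matcher, not taken from ng-dev, might handle both pattern kinds like this:

// Illustrative only: resolving target branches from PR labels using the label
// data shown in the removed config above. Pattern may be a string or a RegExp.
function matches(pattern, label) {
  return typeof pattern === 'string' ? pattern === label : pattern.test(label);
}

const patch = '10.0.x';
const labels = [
  { pattern: 'PR target: master-only', branches: ['master'] },
  { pattern: 'PR target: patch-only', branches: [patch] },
  { pattern: 'PR target: master & patch', branches: ['master', patch] },
];

function targetBranches(prLabels) {
  const match = labels.find(l => prLabels.some(name => matches(l.pattern, name)));
  return match ? match.branches : null;
}

console.log(targetBranches(['cla: yes', 'PR target: master & patch']));      // ['master', '10.0.x']
console.log(matches(/^PR action: merge(-assistance)?/, 'PR action: merge')); // true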
.pullapprove.yml (354)
|
||||
# Used for approving minor changes, large-scale refactorings, and in emergency situations.
|
||||
#
|
||||
# IgorMinar
|
||||
# jelbourn
|
||||
# josephperrott
|
||||
# kara
|
||||
# mhevery
|
||||
#
|
||||
# =========================================================
|
||||
@ -97,12 +97,6 @@
|
||||
|
||||
version: 3
|
||||
|
||||
# Meta field that goes unused by PullApprove to allow for defining aliases to be
|
||||
# used throughout the config.
|
||||
meta:
|
||||
1: &can-be-global-approved "\"global-approvers\" not in groups.approved"
|
||||
2: &can-be-global-docs-approved "\"global-docs-approvers\" not in groups.approved"
|
||||
|
||||
# turn on 'draft' support
|
||||
# https://docs.pullapprove.com/config/github-api-version/
|
||||
# https://developer.github.com/v3/previews/#draft-pull-requests
|
||||
@ -121,49 +115,11 @@ pullapprove_conditions:
|
||||
|
||||
|
||||
groups:
|
||||
# =========================================================
|
||||
# Global Approvers
|
||||
#
|
||||
# All reviews performed for global approvals require using
|
||||
# the `Reviewed-for:` specifier to set the approval
|
||||
# specificity as documented at:
|
||||
# https://docs.pullapprove.com/reviewed-for/
|
||||
# =========================================================
|
||||
global-approvers:
|
||||
type: optional
|
||||
reviewers:
|
||||
teams:
|
||||
- framework-global-approvers
|
||||
reviews:
|
||||
request: 0
|
||||
required: 1
|
||||
reviewed_for: required
|
||||
|
||||
# =========================================================
|
||||
# Global Approvers For Docs
|
||||
#
|
||||
# All reviews performed for global docs approvals require
|
||||
# using the `Reviewed-for:` specifier to set the approval
|
||||
# specificity as documented at:
|
||||
# https://docs.pullapprove.com/reviewed-for/
|
||||
# =========================================================
|
||||
global-docs-approvers:
|
||||
type: optional
|
||||
reviewers:
|
||||
teams:
|
||||
- framework-global-approvers-for-docs-only-changes
|
||||
reviews:
|
||||
request: 0
|
||||
required: 1
|
||||
reviewed_for: required
|
||||
|
||||
# =========================================================
|
||||
# Framework: Animations
|
||||
# =========================================================
|
||||
fw-animations:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/animations/**',
|
||||
@ -179,6 +135,9 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- matsko
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -186,10 +145,8 @@ groups:
|
||||
# =========================================================
|
||||
fw-compiler:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files.exclude('packages/compiler-cli/ngcc/**'), [
|
||||
contains_any_globs(files, [
|
||||
'packages/compiler/**',
|
||||
'packages/examples/compiler/**',
|
||||
'packages/compiler-cli/**',
|
||||
@ -203,6 +160,10 @@ groups:
|
||||
- alxhub
|
||||
- AndrewKushnir
|
||||
- JoostK
|
||||
- kara
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -210,30 +171,19 @@ groups:
|
||||
# =========================================================
|
||||
fw-ngcc:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- files.include('packages/compiler-cli/ngcc/**')
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/compiler-cli/ngcc/**'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
- gkalpak
|
||||
- JoostK
|
||||
- petebacondarwin
|
||||
|
||||
|
||||
# =========================================================
|
||||
# Framework: Migrations
|
||||
# =========================================================
|
||||
fw-migrations:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- files.include("packages/core/schematics/**")
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
- crisbeto
|
||||
- devversion
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -241,10 +191,8 @@ groups:
|
||||
# =========================================================
|
||||
fw-core:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files.exclude("packages/core/schematics/**"), [
|
||||
contains_any_globs(files, [
|
||||
'packages/core/**',
|
||||
'packages/examples/core/**',
|
||||
'packages/common/**',
|
||||
@ -349,10 +297,12 @@ groups:
|
||||
users:
|
||||
- alxhub
|
||||
- AndrewKushnir
|
||||
- atscott
|
||||
- ~kara # do not request reviews from Kara, but allow her to approve PRs
|
||||
- kara
|
||||
- mhevery
|
||||
- pkozlowski-opensource
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -360,11 +310,10 @@ groups:
|
||||
# =========================================================
|
||||
fw-http:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/common/http/**',
|
||||
'packages/http/**',
|
||||
'packages/examples/http/**',
|
||||
'aio/content/guide/http.md',
|
||||
'aio/content/examples/http/**',
|
||||
@ -374,6 +323,9 @@ groups:
|
||||
users:
|
||||
- alxhub
|
||||
- IgorMinar
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -381,8 +333,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-elements:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/elements/**',
|
||||
@ -394,6 +344,9 @@ groups:
|
||||
users:
|
||||
- andrewseguin
|
||||
- gkalpak
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -401,8 +354,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-forms:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/forms/**',
|
||||
@ -426,6 +377,9 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- AndrewKushnir
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -433,8 +387,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-i18n:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/core/src/i18n/**',
|
||||
@ -459,6 +411,9 @@ groups:
|
||||
- AndrewKushnir
|
||||
- mhevery
|
||||
- petebacondarwin
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -466,8 +421,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-platform-server:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/platform-server/**',
|
||||
@ -478,6 +431,9 @@ groups:
|
||||
users:
|
||||
- alxhub
|
||||
- kyliau
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -485,30 +441,27 @@ groups:
|
||||
# =========================================================
|
||||
fw-router:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/router/**',
|
||||
'packages/examples/router/**',
|
||||
'aio/content/guide/router.md',
|
||||
'aio/content/guide/router-tutorial.md',
|
||||
'aio/content/examples/router-tutorial/**',
|
||||
'aio/content/examples/router/**',
|
||||
'aio/content/images/guide/router/**'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- atscott
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
# Framework: Service Worker
|
||||
# =========================================================
|
||||
fw-service-worker:
|
||||
fw-server-worker:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/service-worker/**',
|
||||
@ -527,6 +480,9 @@ groups:
|
||||
- alxhub
|
||||
- gkalpak
|
||||
- IgorMinar
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -534,8 +490,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-upgrade:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/upgrade/**',
|
||||
@ -557,6 +511,9 @@ groups:
|
||||
users:
|
||||
- gkalpak
|
||||
- petebacondarwin
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -564,10 +521,8 @@ groups:
|
||||
# =========================================================
|
||||
fw-testing:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files.exclude('packages/compiler-cli/**'), [
|
||||
contains_any_globs(files, [
|
||||
'**/testing/**',
|
||||
'aio/content/guide/testing.md',
|
||||
'aio/content/examples/testing/**',
|
||||
@ -575,9 +530,12 @@ groups:
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- AndrewKushnir
|
||||
- IgorMinar
|
||||
- kara
|
||||
- pkozlowski-opensource
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -585,7 +543,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-benchmarks:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'modules/benchmarks/**'
|
||||
@ -593,7 +550,10 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
- kara
|
||||
- pkozlowski-opensource
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -601,7 +561,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-playground:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'modules/playground/**'
|
||||
@ -609,8 +568,9 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
- jelbourn
|
||||
- pkozlowski-opensource
|
||||
- kara
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -618,8 +578,6 @@ groups:
|
||||
# =========================================================
|
||||
fw-security:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/core/src/sanitization/**',
|
||||
@ -634,30 +592,28 @@ groups:
|
||||
users:
|
||||
- IgorMinar
|
||||
- mhevery
|
||||
- jelbourn
|
||||
- pkozlowski-opensource
|
||||
reviews:
|
||||
request: -1 # request reviews from everyone
|
||||
required: 2 # require at least 2 approvals
|
||||
reviewed_for: required
|
||||
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
# =========================================================
|
||||
# Bazel
|
||||
# =========================================================
|
||||
bazel:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/bazel/**',
|
||||
'aio/content/guide/bazel.md'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
- josephperrott
|
||||
- kyliau
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -665,8 +621,6 @@ groups:
|
||||
# =========================================================
|
||||
language-service:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/language-service/**',
|
||||
@ -677,6 +631,9 @@ groups:
|
||||
users:
|
||||
- ayazhafiz
|
||||
- kyliau
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -684,8 +641,6 @@ groups:
|
||||
# =========================================================
|
||||
zone-js:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/zone.js/**',
|
||||
@ -695,6 +650,9 @@ groups:
|
||||
users:
|
||||
- JiaLiPassion
|
||||
- mhevery
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -702,8 +660,6 @@ groups:
|
||||
# =========================================================
|
||||
benchpress:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/benchpress/**'
|
||||
@ -711,7 +667,9 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
- josephperrott
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -719,7 +677,6 @@ groups:
|
||||
# =========================================================
|
||||
integration-tests:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'integration/**'
|
||||
@ -728,7 +685,10 @@ groups:
|
||||
users:
|
||||
- IgorMinar
|
||||
- josephperrott
|
||||
- kara
|
||||
- mhevery
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -736,8 +696,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-getting-started-and-tutorial:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/guide/setup-local.md',
|
||||
@ -761,6 +719,9 @@ groups:
|
||||
- aikidave
|
||||
- IgorMinar
|
||||
- StephenFluin
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -768,8 +729,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-marketing:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/marketing/**',
|
||||
@ -781,17 +740,18 @@ groups:
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- aikidave
|
||||
- IgorMinar
|
||||
- StephenFluin
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
# Docs: Observables
|
||||
# =========================================================
|
||||
docs-observables:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/guide/observables.md',
|
||||
@ -808,6 +768,9 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -815,8 +778,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-packaging-and-releasing:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'docs/PUBLIC_API.md',
|
||||
@ -840,33 +801,10 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
- jelbourn
|
||||
|
||||
|
||||
# =========================================================
|
||||
# Tooling: Compiler API shared with Angular CLI
|
||||
#
|
||||
# Changing this API might break Angular CLI, so we require
|
||||
# the CLI team to approve changes here.
|
||||
# =========================================================
|
||||
tooling-cli-shared-api:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'packages/compiler-cli/src/tooling.ts'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- alan-agius4
|
||||
- clydin
|
||||
- kyliau
|
||||
- IgorMinar
|
||||
reviews:
|
||||
request: -1 # request reviews from everyone
|
||||
required: 2 # require at least 2 approvals
|
||||
reviewed_for: required
|
||||
- kara
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -874,8 +812,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-cli:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/cli/**',
|
||||
@ -889,7 +825,7 @@ groups:
|
||||
'aio/content/images/guide/deployment/**',
|
||||
'aio/content/guide/file-structure.md',
|
||||
'aio/content/guide/ivy.md',
|
||||
'aio/content/guide/web-worker.md',
|
||||
'aio/content/guide/web-worker.md'
|
||||
'aio/content/guide/workspace-config.md',
|
||||
])
|
||||
reviewers:
|
||||
@ -897,6 +833,9 @@ groups:
|
||||
- clydin
|
||||
- IgorMinar
|
||||
- mgechev
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -904,8 +843,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-libraries:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/guide/creating-libraries.md',
|
||||
@ -917,6 +854,9 @@ groups:
|
||||
- alan-agius4
|
||||
- IgorMinar
|
||||
- mgechev
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -924,8 +864,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-schematics:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/content/guide/schematics.md',
|
||||
@ -939,6 +877,9 @@ groups:
|
||||
- alan-agius4
|
||||
- IgorMinar
|
||||
- mgechev
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -946,8 +887,6 @@ groups:
|
||||
# =========================================================
|
||||
docs-infra:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- *can-be-global-docs-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'aio/*',
|
||||
@ -968,6 +907,9 @@ groups:
|
||||
- gkalpak
|
||||
- IgorMinar
|
||||
- petebacondarwin
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
- ~framework-global-approvers-for-docs-only-changes
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -975,14 +917,12 @@ groups:
|
||||
# =========================================================
|
||||
dev-infra:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files.exclude("CHANGELOG.md"), [
|
||||
contains_any_globs(files, [
|
||||
'*',
|
||||
'.circleci/**',
|
||||
'.devcontainer/**',
|
||||
'.github/**',
|
||||
'.ng-dev/**',
|
||||
'.vscode/**',
|
||||
'.yarn/**',
|
||||
'dev-infra/**',
|
||||
@ -998,6 +938,8 @@ groups:
|
||||
'docs/TOOLS.md',
|
||||
'docs/TRIAGE_AND_LABELS.md',
|
||||
'goldens/*',
|
||||
'modules/e2e_util/e2e_util.ts',
|
||||
'modules/e2e_util/perf_util.ts',
|
||||
'modules/*',
|
||||
'packages/*',
|
||||
'packages/examples/test-utils/**',
|
||||
@ -1005,12 +947,18 @@ groups:
|
||||
'packages/examples/*',
|
||||
'scripts/**',
|
||||
'third_party/**',
|
||||
'tools/brotli-cli/**',
|
||||
'tools/browsers/**',
|
||||
'tools/build/**',
|
||||
'tools/circular_dependency_test/**',
|
||||
'tools/contributing-stats/**',
|
||||
'tools/components/**'
|
||||
'tools/gulp-tasks/**',
|
||||
'tools/ng_rollup_bundle/**',
|
||||
'tools/ngcontainer/**',
|
||||
'tools/npm/**',
|
||||
'tools/npm_integration_test/**',
|
||||
'tools/pullapprove/**',
|
||||
'tools/rxjs/**',
|
||||
'tools/saucelabs/**',
|
||||
'tools/size-tracking/**',
|
||||
@ -1020,6 +968,7 @@ groups:
|
||||
'tools/ts-api-guardian/**',
|
||||
'tools/tslint/**',
|
||||
'tools/utils/**',
|
||||
'tools/validate-commit-message/**',
|
||||
'tools/yarn/**',
|
||||
'tools/*',
|
||||
'**/*.bzl',
|
||||
@ -1032,6 +981,8 @@ groups:
|
||||
- gkalpak
|
||||
- IgorMinar
|
||||
- josephperrott
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# =========================================================
|
||||
@ -1039,11 +990,9 @@ groups:
|
||||
# =========================================================
|
||||
public-api:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'goldens/public-api/**',
|
||||
'CHANGELOG.md',
|
||||
'docs/NAMING.md',
|
||||
'aio/content/guide/glossary.md',
|
||||
'aio/content/guide/styleguide.md',
|
||||
@ -1052,14 +1001,9 @@ groups:
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
- IgorMinar
|
||||
- jelbourn
|
||||
- pkozlowski-opensource
|
||||
reviews:
|
||||
request: -1 # request reviews from everyone
|
||||
required: 3 # require at least 3 approvals
|
||||
reviewed_for: required
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# ================================================
|
||||
@ -1067,21 +1011,17 @@ groups:
|
||||
# ================================================
|
||||
size-tracking:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'goldens/size-tracking/**'
|
||||
'aio/scripts/_payload-limits.json',
|
||||
'integration/_payload-limits.json'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- alxhub
|
||||
- IgorMinar
|
||||
- jelbourn
|
||||
- pkozlowski-opensource
|
||||
reviews:
|
||||
request: -1 # request reviews from everyone
|
||||
required: 2 # require at least 2 approvals
|
||||
reviewed_for: required
|
||||
- kara
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# ================================================
|
||||
@ -1089,17 +1029,17 @@ groups:
|
||||
# ================================================
|
||||
circular-dependencies:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'goldens/circular-deps/packages.json'
|
||||
'goldens/packages-circular-deps.json'
|
||||
])
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
- jelbourn
|
||||
- josephperrott
|
||||
- pkozlowski-opensource
|
||||
- kara
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
####################################################################################
|
||||
@ -1111,7 +1051,6 @@ groups:
|
||||
# =========================================================
|
||||
code-ownership:
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
- >
|
||||
contains_any_globs(files, [
|
||||
'.pullapprove.yml'
|
||||
@ -1119,38 +1058,21 @@ groups:
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
||||
|
||||
# ====================================================
|
||||
# Catch all for if no groups match the code change
|
||||
# ====================================================
|
||||
fallback:
|
||||
# A group is considered to be `active` for a PR if at least one of group's
|
||||
# conditions matches the PR.
|
||||
#
|
||||
# The PullApprove CI check should fail if a PR has no `active` groups, as
|
||||
# this indicates the PR is modifying a file that has no owner.
|
||||
#
|
||||
# This is enforced through the pullapprove verification check done
|
||||
# as part of the CircleCI lint job. Failures in this lint job should be
|
||||
# fixed as part of the PR. This can be done by updating the
|
||||
# `.pullapprove.yml` file cover the unmatched path.
|
||||
# The pullapprove verification script is part of the ng-dev tool and can be
|
||||
# run locally with the command: `yarn -s ng-dev pullapprove verify`
|
||||
#
|
||||
# For cases in which the verification check fails to ensure coverage, this
|
||||
# group will be active. The expectation is that this should be remedied
|
||||
# before merging the PR as described above. In an emergency situation
|
||||
# `global-approvers` can still approve PRs that match this `fallback` rule,
|
||||
# but that should be an exception and not an expectation.
|
||||
conditions:
|
||||
- *can-be-global-approved
|
||||
# The following groups have no conditions and will be `active` on all PRs
|
||||
# - `global-approvers`
|
||||
# - `global-docs-approvers`
|
||||
#
|
||||
# Since this means the minimum number of active groups a PR can have is 2, this
|
||||
# `fallback` group should be matched anytime the number of active groups is at or
|
||||
# below this minimum. This work as a protection to ensure that pullapprove does
|
||||
# not incidently mark a PR as passing without meeting the review criteria.
|
||||
- len(groups.active) <= 2
|
||||
# Groups which are found to have matching conditions are `active`
|
||||
# according to PullApprove. If no groups are matched and considered
|
||||
# active, we still want to have a review occur.
|
||||
- len(groups.active) == 0
|
||||
reviewers:
|
||||
users:
|
||||
- IgorMinar
|
||||
teams:
|
||||
- ~framework-global-approvers
|
||||
|
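The .pullapprove.yml groups above gate reviewer selection on conditions such as contains_any_globs(files, [...]). PullApprove evaluates those server-side; as a rough local analogue only, the sketch below uses the minimatch package (an assumed dependency, not part of this repository) to test a changed-file list against a group's globs:

// Illustrative only: approximates a contains_any_globs(files, globs) check.
const minimatch = require('minimatch');

function containsAnyGlobs(files, globs) {
  return files.some(f => globs.some(g => minimatch(f, g)));
}

console.log(containsAnyGlobs(
    ['packages/compiler-cli/ngcc/src/main.ts'],
    ['packages/compiler-cli/ngcc/**']));  // true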
@@ -34805,27 +34805,15 @@ function hasMergeConflicts(str) {
function parse(str, fileLoc) {
const parser = new Parser(str, fileLoc);
parser.next();

if (!fileLoc.endsWith(`.yml`)) {
try {
return parser.parse();
} catch (error1) {
try {
return parser.parse();
} catch (error1) {
try {
return safeLoad(str, {
schema: FAILSAFE_SCHEMA
});
} catch (error2) {
throw error1;
}
}
} else {
const result = safeLoad(str, {
schema: FAILSAFE_SCHEMA
});
if (typeof result === 'object') {
return result;
} else {
return {};
return safeLoad(str, {
schema: FAILSAFE_SCHEMA
});
} catch (error2) {
throw error1;
}
}
}
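The reworked parse() above keeps the same fallback strategy: try the lockfile parser first, fall back to js-yaml's failsafe schema, and re-throw the original error if both attempts fail. A standalone sketch of that pattern (with strictParse standing in for the bundle's own Parser) is:

// Sketch of the try-parse-then-YAML-fallback pattern shown above, using js-yaml
// (which the bundled code also relies on). `strictParse` is a stand-in argument.
const { safeLoad, FAILSAFE_SCHEMA } = require('js-yaml');

function parseWithFallback(str, strictParse) {
  try {
    return strictParse(str);
  } catch (error1) {
    try {
      return safeLoad(str, { schema: FAILSAFE_SCHEMA });
    } catch (error2) {
      throw error1;  // surface the original parser error, not the YAML one
    }
  }
}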
@@ -46678,7 +46666,7 @@ function mkdirfix (name, opts, cb) {
/* 194 */
/***/ (function(module, exports) {
module.exports = {"name":"yarn","installationMethod":"unknown","version":"1.22.4","license":"BSD-2-Clause","preferGlobal":true,"description":"📦🐈 Fast, reliable, and secure dependency management.","dependencies":{"@zkochan/cmd-shim":"^3.1.0","babel-runtime":"^6.26.0","bytes":"^3.0.0","camelcase":"^4.0.0","chalk":"^2.1.0","cli-table3":"^0.4.0","commander":"^2.9.0","death":"^1.0.0","debug":"^3.0.0","deep-equal":"^1.0.1","detect-indent":"^5.0.0","dnscache":"^1.0.1","glob":"^7.1.1","gunzip-maybe":"^1.4.0","hash-for-dep":"^1.2.3","imports-loader":"^0.8.0","ini":"^1.3.4","inquirer":"^6.2.0","invariant":"^2.2.0","is-builtin-module":"^2.0.0","is-ci":"^1.0.10","is-webpack-bundle":"^1.0.0","js-yaml":"^3.13.1","leven":"^2.0.0","loud-rejection":"^1.2.0","micromatch":"^2.3.11","mkdirp":"^0.5.1","node-emoji":"^1.6.1","normalize-url":"^2.0.0","npm-logical-tree":"^1.2.1","object-path":"^0.11.2","proper-lockfile":"^2.0.0","puka":"^1.0.0","read":"^1.0.7","request":"^2.87.0","request-capture-har":"^1.2.2","rimraf":"^2.5.0","semver":"^5.1.0","ssri":"^5.3.0","strip-ansi":"^4.0.0","strip-bom":"^3.0.0","tar-fs":"^1.16.0","tar-stream":"^1.6.1","uuid":"^3.0.1","v8-compile-cache":"^2.0.0","validate-npm-package-license":"^3.0.4","yn":"^2.0.0"},"devDependencies":{"babel-core":"^6.26.0","babel-eslint":"^7.2.3","babel-loader":"^6.2.5","babel-plugin-array-includes":"^2.0.3","babel-plugin-inline-import":"^3.0.0","babel-plugin-transform-builtin-extend":"^1.1.2","babel-plugin-transform-inline-imports-commonjs":"^1.0.0","babel-plugin-transform-runtime":"^6.4.3","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","babel-preset-stage-0":"^6.0.0","babylon":"^6.5.0","commitizen":"^2.9.6","cz-conventional-changelog":"^2.0.0","eslint":"^4.3.0","eslint-config-fb-strict":"^22.0.0","eslint-plugin-babel":"^5.0.0","eslint-plugin-flowtype":"^2.35.0","eslint-plugin-jasmine":"^2.6.2","eslint-plugin-jest":"^21.0.0","eslint-plugin-jsx-a11y":"^6.0.2","eslint-plugin-prefer-object-spread":"^1.2.1","eslint-plugin-prettier":"^2.1.2","eslint-plugin-react":"^7.1.0","eslint-plugin-relay":"^0.0.28","eslint-plugin-yarn-internal":"file:scripts/eslint-rules","execa":"^0.11.0","fancy-log":"^1.3.2","flow-bin":"^0.66.0","git-release-notes":"^3.0.0","gulp":"^4.0.0","gulp-babel":"^7.0.0","gulp-if":"^2.0.1","gulp-newer":"^1.0.0","gulp-plumber":"^1.0.1","gulp-sourcemaps":"^2.2.0","jest":"^22.4.4","jsinspect":"^0.12.6","minimatch":"^3.0.4","mock-stdin":"^0.3.0","prettier":"^1.5.2","string-replace-loader":"^2.1.1","temp":"^0.8.3","webpack":"^2.1.0-beta.25","yargs":"^6.3.0"},"resolutions":{"sshpk":"^1.14.2"},"engines":{"node":">=4.0.0"},"repository":"yarnpkg/yarn","bin":{"yarn":"./bin/yarn.js","yarnpkg":"./bin/yarn.js"},"scripts":{"build":"gulp build","build-bundle":"node ./scripts/build-webpack.js","build-chocolatey":"powershell ./scripts/build-chocolatey.ps1","build-deb":"./scripts/build-deb.sh","build-dist":"bash ./scripts/build-dist.sh","build-win-installer":"scripts\\build-windows-installer.bat","changelog":"git-release-notes $(git describe --tags --abbrev=0 $(git describe --tags --abbrev=0)^)..$(git describe --tags --abbrev=0) scripts/changelog.md","dupe-check":"yarn jsinspect ./src","lint":"eslint . 
&& flow check","pkg-tests":"yarn --cwd packages/pkg-tests jest yarn.test.js","prettier":"eslint src __tests__ --fix","release-branch":"./scripts/release-branch.sh","test":"yarn lint && yarn test-only","test-only":"node --max_old_space_size=4096 node_modules/jest/bin/jest.js --verbose","test-only-debug":"node --inspect-brk --max_old_space_size=4096 node_modules/jest/bin/jest.js --runInBand --verbose","test-coverage":"node --max_old_space_size=4096 node_modules/jest/bin/jest.js --coverage --verbose","watch":"gulp watch","commit":"git-cz"},"jest":{"collectCoverageFrom":["src/**/*.js"],"testEnvironment":"node","modulePathIgnorePatterns":["__tests__/fixtures/","packages/pkg-tests/pkg-tests-fixtures","dist/"],"testPathIgnorePatterns":["__tests__/(fixtures|__mocks__)/","updates/","_(temp|mock|install|init|helpers).js$","packages/pkg-tests"]},"config":{"commitizen":{"path":"./node_modules/cz-conventional-changelog"}}}
|
||||
module.exports = {"name":"yarn","installationMethod":"unknown","version":"1.21.1","license":"BSD-2-Clause","preferGlobal":true,"description":"📦🐈 Fast, reliable, and secure dependency management.","dependencies":{"@zkochan/cmd-shim":"^3.1.0","babel-runtime":"^6.26.0","bytes":"^3.0.0","camelcase":"^4.0.0","chalk":"^2.1.0","cli-table3":"^0.4.0","commander":"^2.9.0","death":"^1.0.0","debug":"^3.0.0","deep-equal":"^1.0.1","detect-indent":"^5.0.0","dnscache":"^1.0.1","glob":"^7.1.1","gunzip-maybe":"^1.4.0","hash-for-dep":"^1.2.3","imports-loader":"^0.8.0","ini":"^1.3.4","inquirer":"^6.2.0","invariant":"^2.2.0","is-builtin-module":"^2.0.0","is-ci":"^1.0.10","is-webpack-bundle":"^1.0.0","js-yaml":"^3.13.1","leven":"^2.0.0","loud-rejection":"^1.2.0","micromatch":"^2.3.11","mkdirp":"^0.5.1","node-emoji":"^1.6.1","normalize-url":"^2.0.0","npm-logical-tree":"^1.2.1","object-path":"^0.11.2","proper-lockfile":"^2.0.0","puka":"^1.0.0","read":"^1.0.7","request":"^2.87.0","request-capture-har":"^1.2.2","rimraf":"^2.5.0","semver":"^5.1.0","ssri":"^5.3.0","strip-ansi":"^4.0.0","strip-bom":"^3.0.0","tar-fs":"^1.16.0","tar-stream":"^1.6.1","uuid":"^3.0.1","v8-compile-cache":"^2.0.0","validate-npm-package-license":"^3.0.4","yn":"^2.0.0"},"devDependencies":{"babel-core":"^6.26.0","babel-eslint":"^7.2.3","babel-loader":"^6.2.5","babel-plugin-array-includes":"^2.0.3","babel-plugin-inline-import":"^3.0.0","babel-plugin-transform-builtin-extend":"^1.1.2","babel-plugin-transform-inline-imports-commonjs":"^1.0.0","babel-plugin-transform-runtime":"^6.4.3","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","babel-preset-stage-0":"^6.0.0","babylon":"^6.5.0","commitizen":"^2.9.6","cz-conventional-changelog":"^2.0.0","eslint":"^4.3.0","eslint-config-fb-strict":"^22.0.0","eslint-plugin-babel":"^5.0.0","eslint-plugin-flowtype":"^2.35.0","eslint-plugin-jasmine":"^2.6.2","eslint-plugin-jest":"^21.0.0","eslint-plugin-jsx-a11y":"^6.0.2","eslint-plugin-prefer-object-spread":"^1.2.1","eslint-plugin-prettier":"^2.1.2","eslint-plugin-react":"^7.1.0","eslint-plugin-relay":"^0.0.28","eslint-plugin-yarn-internal":"file:scripts/eslint-rules","execa":"^0.11.0","fancy-log":"^1.3.2","flow-bin":"^0.66.0","git-release-notes":"^3.0.0","gulp":"^4.0.0","gulp-babel":"^7.0.0","gulp-if":"^2.0.1","gulp-newer":"^1.0.0","gulp-plumber":"^1.0.1","gulp-sourcemaps":"^2.2.0","jest":"^22.4.4","jsinspect":"^0.12.6","minimatch":"^3.0.4","mock-stdin":"^0.3.0","prettier":"^1.5.2","string-replace-loader":"^2.1.1","temp":"^0.8.3","webpack":"^2.1.0-beta.25","yargs":"^6.3.0"},"resolutions":{"sshpk":"^1.14.2"},"engines":{"node":">=4.0.0"},"repository":"yarnpkg/yarn","bin":{"yarn":"./bin/yarn.js","yarnpkg":"./bin/yarn.js"},"scripts":{"build":"gulp build","build-bundle":"node ./scripts/build-webpack.js","build-chocolatey":"powershell ./scripts/build-chocolatey.ps1","build-deb":"./scripts/build-deb.sh","build-dist":"bash ./scripts/build-dist.sh","build-win-installer":"scripts\\build-windows-installer.bat","changelog":"git-release-notes $(git describe --tags --abbrev=0 $(git describe --tags --abbrev=0)^)..$(git describe --tags --abbrev=0) scripts/changelog.md","dupe-check":"yarn jsinspect ./src","lint":"eslint . 
&& flow check","pkg-tests":"yarn --cwd packages/pkg-tests jest yarn.test.js","prettier":"eslint src __tests__ --fix","release-branch":"./scripts/release-branch.sh","test":"yarn lint && yarn test-only","test-only":"node --max_old_space_size=4096 node_modules/jest/bin/jest.js --verbose","test-only-debug":"node --inspect-brk --max_old_space_size=4096 node_modules/jest/bin/jest.js --runInBand --verbose","test-coverage":"node --max_old_space_size=4096 node_modules/jest/bin/jest.js --coverage --verbose","watch":"gulp watch","commit":"git-cz"},"jest":{"collectCoverageFrom":["src/**/*.js"],"testEnvironment":"node","modulePathIgnorePatterns":["__tests__/fixtures/","packages/pkg-tests/pkg-tests-fixtures","dist/"],"testPathIgnorePatterns":["__tests__/(fixtures|__mocks__)/","updates/","_(temp|mock|install|init|helpers).js$","packages/pkg-tests"]},"config":{"commitizen":{"path":"./node_modules/cz-conventional-changelog"}}}
|
||||
|
||||
/***/ }),
|
||||
/* 195 */
|
||||
@ -69888,12 +69876,12 @@ function getRcConfigForFolder(cwd) {
|
||||
}
|
||||
|
||||
function loadRcFile(fileText, filePath) {
|
||||
var _parse = (0, (_lockfile || _load_lockfile()).parse)(fileText, filePath);
|
||||
var _parse = (0, (_lockfile || _load_lockfile()).parse)(fileText, 'yarnrc');
|
||||
|
||||
let values = _parse.object;
|
||||
|
||||
|
||||
if (filePath.match(/\.yml$/) && typeof values.yarnPath === 'string') {
|
||||
if (filePath.match(/\.yml$/)) {
|
||||
values = { 'yarn-path': values.yarnPath };
|
||||
}
|
||||
|
||||
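The `loadRcFile` hunk above differs in two details: whether the lockfile parser is given the real `filePath` or the literal `'yarnrc'`, and whether the `.yml` branch checks `typeof values.yarnPath === 'string'` before mapping the Berry-style `yarnPath` key onto the classic `yarn-path` key. A minimal standalone sketch of the guarded mapping (the function name and shape are illustrative, not the bundled code):

```ts
// Standalone sketch of the guarded .yarnrc.yml -> classic .yarnrc key mapping shown above.
function normalizeRcValues(filePath: string, values: Record<string, unknown>): Record<string, unknown> {
  if (filePath.match(/\.yml$/) && typeof values.yarnPath === 'string') {
    // Only the yarn-path setting is carried over from a .yml config.
    return {'yarn-path': values.yarnPath};
  }
  return values;
}
```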
@ -74860,20 +74848,7 @@ let run = exports.run = (() => {
|
||||
} else {
|
||||
let suggestion;
|
||||
|
||||
for (var _iterator9 = scripts.keys(), _isArray9 = Array.isArray(_iterator9), _i9 = 0, _iterator9 = _isArray9 ? _iterator9 : _iterator9[Symbol.iterator]();;) {
|
||||
var _ref16;
|
||||
|
||||
if (_isArray9) {
|
||||
if (_i9 >= _iterator9.length) break;
|
||||
_ref16 = _iterator9[_i9++];
|
||||
} else {
|
||||
_i9 = _iterator9.next();
|
||||
if (_i9.done) break;
|
||||
_ref16 = _i9.value;
|
||||
}
|
||||
|
||||
const commandName = _ref16;
|
||||
|
||||
for (const commandName in scripts) {
|
||||
const steps = leven(commandName, action);
|
||||
if (steps < 2) {
|
||||
suggestion = commandName;
|
||||
@ -74958,19 +74933,19 @@ let run = exports.run = (() => {
|
||||
|
||||
const printedCommands = new Map();
|
||||
|
||||
for (var _iterator10 = pkgCommands, _isArray10 = Array.isArray(_iterator10), _i10 = 0, _iterator10 = _isArray10 ? _iterator10 : _iterator10[Symbol.iterator]();;) {
|
||||
var _ref17;
|
||||
for (var _iterator9 = pkgCommands, _isArray9 = Array.isArray(_iterator9), _i9 = 0, _iterator9 = _isArray9 ? _iterator9 : _iterator9[Symbol.iterator]();;) {
|
||||
var _ref16;
|
||||
|
||||
if (_isArray10) {
|
||||
if (_i10 >= _iterator10.length) break;
|
||||
_ref17 = _iterator10[_i10++];
|
||||
if (_isArray9) {
|
||||
if (_i9 >= _iterator9.length) break;
|
||||
_ref16 = _iterator9[_i9++];
|
||||
} else {
|
||||
_i10 = _iterator10.next();
|
||||
if (_i10.done) break;
|
||||
_ref17 = _i10.value;
|
||||
_i9 = _iterator9.next();
|
||||
if (_i9.done) break;
|
||||
_ref16 = _i9.value;
|
||||
}
|
||||
|
||||
const pkgCommand = _ref17;
|
||||
const pkgCommand = _ref16;
|
||||
|
||||
const action = scripts.get(pkgCommand);
|
||||
invariant(action, 'Action must exists');
|
||||
@ -76101,11 +76076,6 @@ class TarballFetcher extends (_baseFetcher || _load_baseFetcher()).default {
|
||||
chown: false, // don't chown. just leave as it is
|
||||
map: header => {
|
||||
header.mtime = now;
|
||||
if (header.linkname) {
|
||||
const basePath = path.posix.dirname(path.join('/', header.name));
|
||||
const jailPath = path.posix.join(basePath, header.linkname);
|
||||
header.linkname = path.posix.relative('/', jailPath);
|
||||
}
|
||||
return header;
|
||||
},
|
||||
fs: patchedFs
|
||||
@ -78439,11 +78409,6 @@ class RequestManager {
|
||||
rejectNext(err);
|
||||
};
|
||||
|
||||
const rejectWithoutUrl = function rejectWithoutUrl(err) {
|
||||
err.message = err.message;
|
||||
rejectNext(err);
|
||||
};
|
||||
|
||||
const queueForRetry = reason => {
|
||||
const attempts = params.retryAttempts || 0;
|
||||
if (attempts >= this.maxRetryAttempts - 1) {
|
||||
@ -78499,11 +78464,6 @@ class RequestManager {
|
||||
}
|
||||
}
|
||||
|
||||
if (res.statusCode === 401 && res.caseless && res.caseless.get('server') === 'GitHub.com') {
|
||||
const message = `${res.body.message}. If using GITHUB_TOKEN in your env, check that it is valid.`;
|
||||
rejectWithoutUrl(new Error(this.reporter.lang('unauthorizedResponse', res.caseless.get('server'), message)));
|
||||
}
|
||||
|
||||
if (res.statusCode === 401 && res.headers['www-authenticate']) {
|
||||
const authMethods = res.headers['www-authenticate'].split(/,\s*/).map(s => s.toLowerCase());
|
||||
|
||||
@ -97006,14 +96966,12 @@ function _load_asyncToGenerator() {
|
||||
|
||||
let run = exports.run = (() => {
|
||||
var _ref = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (config, reporter, flags, args) {
|
||||
const installVersion = flags[`2`] ? `berry` : flags.install;
|
||||
|
||||
if (installVersion) {
|
||||
if (flags.install) {
|
||||
const lockfilePath = path.resolve(config.cwd, 'yarn.lock');
|
||||
if (!(yield (_fs || _load_fs()).exists(lockfilePath))) {
|
||||
yield (_fs || _load_fs()).writeFile(lockfilePath, '');
|
||||
}
|
||||
yield (_child || _load_child()).spawn((_constants || _load_constants()).NODE_BIN_PATH, [process.argv[1], 'policies', 'set-version', installVersion], {
|
||||
yield (_child || _load_child()).spawn((_constants || _load_constants()).NODE_BIN_PATH, [process.argv[1], 'policies', 'set-version', flags.install], {
|
||||
stdio: 'inherit',
|
||||
cwd: config.cwd
|
||||
});
|
||||
@ -97308,7 +97266,6 @@ function setFlags(commander) {
|
||||
commander.option('-y, --yes', 'use default options');
|
||||
commander.option('-p, --private', 'use default options and private true');
|
||||
commander.option('-i, --install <value>', 'install a specific Yarn release');
|
||||
commander.option('-2', 'generates the project using Yarn 2');
|
||||
}
|
||||
|
||||
function hasWrapper(commander, args) {
|
||||
@ -98297,7 +98254,6 @@ var _buildSubCommands = (0, (_buildSubCommands2 || _load_buildSubCommands()).def
|
||||
|
||||
let bundleUrl;
|
||||
let bundleVersion;
|
||||
let isV2 = false;
|
||||
|
||||
if (range === 'nightly' || range === 'nightlies') {
|
||||
bundleUrl = 'https://nightly.yarnpkg.com/latest.js';
|
||||
@ -98305,18 +98261,10 @@ var _buildSubCommands = (0, (_buildSubCommands2 || _load_buildSubCommands()).def
|
||||
} else if (range === 'berry' || range === 'v2' || range === '2') {
|
||||
bundleUrl = 'https://github.com/yarnpkg/berry/raw/master/packages/berry-cli/bin/berry.js';
|
||||
bundleVersion = 'berry';
|
||||
isV2 = true;
|
||||
} else {
|
||||
let releases = [];
|
||||
|
||||
try {
|
||||
releases = yield fetchReleases(config, {
|
||||
includePrereleases: allowRc
|
||||
});
|
||||
} catch (e) {
|
||||
reporter.error(e.message);
|
||||
return;
|
||||
}
|
||||
const releases = yield fetchReleases(config, {
|
||||
includePrereleases: allowRc
|
||||
});
|
||||
|
||||
const release = releases.find(function (release) {
|
||||
// $FlowFixMe
|
||||
@ -98337,6 +98285,7 @@ var _buildSubCommands = (0, (_buildSubCommands2 || _load_buildSubCommands()).def
|
||||
reporter.log(`Downloading ${chalk.green(bundleUrl)}...`);
|
||||
|
||||
const bundle = yield fetchBundle(config, bundleUrl);
|
||||
const rc = (0, (_rc || _load_rc()).getRcConfigForFolder)(config.lockfileFolder);
|
||||
|
||||
const yarnPath = path.resolve(config.lockfileFolder, `.yarn/releases/yarn-${bundleVersion}.js`);
|
||||
reporter.log(`Saving it into ${chalk.magenta(yarnPath)}...`);
|
||||
@ -98344,22 +98293,10 @@ var _buildSubCommands = (0, (_buildSubCommands2 || _load_buildSubCommands()).def
|
||||
yield (_fs || _load_fs()).writeFile(yarnPath, bundle);
|
||||
yield (_fs || _load_fs()).chmod(yarnPath, 0o755);
|
||||
|
||||
const targetPath = path.relative(config.lockfileFolder, yarnPath).replace(/\\/g, '/');
|
||||
|
||||
if (isV2) {
|
||||
const rcPath = `${config.lockfileFolder}/.yarnrc.yml`;
|
||||
reporter.log(`Updating ${chalk.magenta(rcPath)}...`);
|
||||
|
||||
yield (_fs || _load_fs()).writeFilePreservingEol(rcPath, `yarnPath: ${JSON.stringify(targetPath)}\n`);
|
||||
} else {
|
||||
const rcPath = `${config.lockfileFolder}/.yarnrc`;
|
||||
reporter.log(`Updating ${chalk.magenta(rcPath)}...`);
|
||||
|
||||
const rc = (0, (_rc || _load_rc()).getRcConfigForFolder)(config.lockfileFolder);
|
||||
rc['yarn-path'] = targetPath;
|
||||
|
||||
yield (_fs || _load_fs()).writeFilePreservingEol(rcPath, `${(0, (_lockfile || _load_lockfile()).stringify)(rc)}\n`);
|
||||
}
|
||||
const rcPath = `${config.lockfileFolder}/.yarnrc`;
|
||||
reporter.log(`Updating ${chalk.magenta(rcPath)}...`);
|
||||
rc['yarn-path'] = path.relative(config.lockfileFolder, yarnPath);
|
||||
yield (_fs || _load_fs()).writeFilePreservingEol(rcPath, `${(0, (_lockfile || _load_lockfile()).stringify)(rc)}\n`);
|
||||
|
||||
reporter.log(`Done!`);
|
||||
})();
|
||||
@ -99682,11 +99619,11 @@ let run = exports.run = (() => {
|
||||
throw new (_errors || _load_errors()).MessageError(reporter.lang('workspaceRootNotFound', config.cwd));
|
||||
}
|
||||
|
||||
if (args.length < 1) {
|
||||
if (flags.originalArgs < 1) {
|
||||
throw new (_errors || _load_errors()).MessageError(reporter.lang('workspaceMissingWorkspace'));
|
||||
}
|
||||
|
||||
if (args.length < 2) {
|
||||
if (flags.originalArgs < 2) {
|
||||
throw new (_errors || _load_errors()).MessageError(reporter.lang('workspaceMissingCommand'));
|
||||
}
|
||||
|
||||
@ -99695,7 +99632,7 @@ let run = exports.run = (() => {
|
||||
|
||||
const workspaces = yield config.resolveWorkspaces(workspaceRootFolder, manifest);
|
||||
|
||||
var _ref2 = args || [];
|
||||
var _ref2 = flags.originalArgs || [];
|
||||
|
||||
const workspaceName = _ref2[0],
|
||||
rest = _ref2.slice(1);
|
||||
@ -99881,23 +99818,28 @@ let runScript = exports.runScript = (() => {
|
||||
const workspaces = yield config.resolveWorkspaces(workspaceRootFolder, manifest);
|
||||
|
||||
try {
|
||||
var _ref6 = flags.originalArgs || [];
|
||||
|
||||
const _ = _ref6[0],
|
||||
rest = _ref6.slice(1);
|
||||
|
||||
for (var _iterator4 = Object.keys(workspaces), _isArray4 = Array.isArray(_iterator4), _i4 = 0, _iterator4 = _isArray4 ? _iterator4 : _iterator4[Symbol.iterator]();;) {
|
||||
var _ref6;
|
||||
var _ref7;
|
||||
|
||||
if (_isArray4) {
|
||||
if (_i4 >= _iterator4.length) break;
|
||||
_ref6 = _iterator4[_i4++];
|
||||
_ref7 = _iterator4[_i4++];
|
||||
} else {
|
||||
_i4 = _iterator4.next();
|
||||
if (_i4.done) break;
|
||||
_ref6 = _i4.value;
|
||||
_ref7 = _i4.value;
|
||||
}
|
||||
|
||||
const workspaceName = _ref6;
|
||||
const workspaceName = _ref7;
|
||||
const loc = workspaces[workspaceName].loc;
|
||||
|
||||
reporter.log(`${os.EOL}> ${workspaceName}`);
|
||||
yield (_child || _load_child()).spawn((_constants2 || _load_constants2()).NODE_BIN_PATH, [(_constants2 || _load_constants2()).YARN_BIN_PATH, 'run', ...args], {
|
||||
yield (_child || _load_child()).spawn((_constants2 || _load_constants2()).NODE_BIN_PATH, [(_constants2 || _load_constants2()).YARN_BIN_PATH, ...rest], {
|
||||
stdio: 'inherit',
|
||||
cwd: loc
|
||||
});
|
||||
@ -100117,11 +100059,7 @@ let main = exports.main = (() => {
|
||||
commandName = 'install';
|
||||
isKnownCommand = true;
|
||||
}
|
||||
if (commandName === 'set' && args[0] === 'version') {
|
||||
commandName = 'policies';
|
||||
args.splice(0, 1, 'set-version');
|
||||
isKnownCommand = true;
|
||||
}
|
||||
|
||||
if (!isKnownCommand) {
|
||||
// if command is not recognized, then set default to `run`
|
||||
args.unshift(commandName);
|
||||
@ -100132,20 +100070,15 @@ let main = exports.main = (() => {
|
||||
let warnAboutRunDashDash = false;
|
||||
// we are using "yarn <script> -abc", "yarn run <script> -abc", or "yarn node -abc", we want -abc
|
||||
// to be script options, not yarn options
|
||||
|
||||
// PROXY_COMMANDS is a map of command name to the number of preservedArgs
|
||||
const PROXY_COMMANDS = {
|
||||
run: 1, // yarn run {command}
|
||||
create: 1, // yarn create {project}
|
||||
node: 0, // yarn node
|
||||
workspaces: 1, // yarn workspaces {command}
|
||||
workspace: 2 // yarn workspace {package} {command}
|
||||
};
|
||||
if (PROXY_COMMANDS.hasOwnProperty(commandName)) {
|
||||
const PROXY_COMMANDS = new Set([`run`, `create`, `node`]);
|
||||
if (PROXY_COMMANDS.has(commandName)) {
|
||||
if (endArgs.length === 0) {
|
||||
// $FlowFixMe doesn't like that PROXY_COMMANDS doesn't have keys for all commands.
|
||||
let preservedArgs = PROXY_COMMANDS[commandName];
|
||||
|
||||
let preservedArgs = 0;
|
||||
// the "run" and "create" command take one argument that we want to parse as usual (the
|
||||
// script/package name), hence the splice(1)
|
||||
if (command === (_index3 || _load_index3()).default.run || command === (_index3 || _load_index3()).default.create) {
|
||||
preservedArgs += 1;
|
||||
}
|
||||
// If the --into option immediately follows the command (or the script name in the "run/create"
|
||||
// case), we parse them as regular options so that we can cd into them
|
||||
if (args[preservedArgs] === `--into`) {
|
||||
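The hunk above swaps a plain `Set` of proxy commands plus ad-hoc special-casing for a map from command name to the number of leading arguments that yarn still parses as its own (everything after them goes to the proxied script). A simplified sketch of that lookup (standalone and trimmed, not the bundled code):

```ts
// Simplified sketch of the proxy-command table shown above.
const PROXY_COMMANDS: Record<string, number> = {
  run: 1,        // yarn run {command}
  create: 1,     // yarn create {project}
  node: 0,       // yarn node
  workspaces: 1, // yarn workspaces {command}
  workspace: 2,  // yarn workspace {package} {command}
};

// How many leading args are parsed as yarn options for a proxied command,
// or undefined when the command is not proxied at all.
function preservedArgCount(commandName: string): number | undefined {
  return Object.prototype.hasOwnProperty.call(PROXY_COMMANDS, commandName)
    ? PROXY_COMMANDS[commandName]
    : undefined;
}
```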
@ -100157,6 +100090,7 @@ let main = exports.main = (() => {
|
||||
}
|
||||
}
|
||||
|
||||
(_commander || _load_commander()).default.originalArgs = args;
|
||||
args = [...preCommandArgs, ...args];
|
||||
|
||||
command.setFlags((_commander || _load_commander()).default);
|
||||
@ -100598,11 +100532,6 @@ let start = (() => {
|
||||
const opts = { stdio: 'inherit', env: Object.assign({}, process.env, { YARN_IGNORE_PATH: 1 }) };
|
||||
let exitCode = 0;
|
||||
|
||||
process.on(`SIGINT`, function () {
|
||||
// We don't want SIGINT to kill our process; we want it to kill the
|
||||
// innermost process, whose end will cause our own to exit.
|
||||
});
|
||||
|
||||
try {
|
||||
if (yarnPath.endsWith(`.js`)) {
|
||||
exitCode = yield (0, (_child || _load_child()).spawnp)(process.execPath, [yarnPath, ...argv], opts);
|
||||
@ -104994,7 +104923,6 @@ const messages = {
|
||||
errorExtractingTarball: 'Extracting tar content of $1 failed, the file appears to be corrupt: $0',
|
||||
updateInstalling: 'Installing $0...',
|
||||
hostedGitResolveError: 'Error connecting to repository. Please, check the url.',
|
||||
unauthorizedResponse: 'Received a 401 from $0. $1',
|
||||
|
||||
unknownFetcherFor: 'Unknown fetcher for $0',
|
||||
|
||||
@ -106869,7 +106797,7 @@ const semver = __webpack_require__(22);
|
||||
const path = __webpack_require__(0);
|
||||
const url = __webpack_require__(24);
|
||||
|
||||
const VALID_BIN_KEYS = /^(?!\.{0,2}$)[a-z0-9._-]+$/i;
|
||||
const VALID_BIN_KEYS = /^[a-z0-9_-]+$/i;
|
||||
|
||||
const LICENSE_RENAMES = {
|
||||
'MIT/X11': 'MIT',
|
||||
@ -107732,11 +107660,7 @@ function parseRcPaths(paths, parser) {
|
||||
try {
|
||||
return parser((0, (_fs || _load_fs()).readFileSync)(path).toString(), path);
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT' || error.code === 'EISDIR') {
|
||||
return {};
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
}));
|
||||
}
|
2
.yarnrc
2
.yarnrc
@ -2,4 +2,4 @@
# yarn lockfile v1


yarn-path ".yarn/releases/yarn-1.22.4.js"
yarn-path ".yarn/releases/yarn-1.21.1.js"
@ -2,6 +2,7 @@ package(default_visibility = ["//visibility:public"])

exports_files([
"LICENSE",
"protractor-perf.conf.js",
"karma-js.conf.js",
"browser-providers.conf.js",
"scripts/ci/track-payload-size.sh",
1012
CHANGELOG.md
1012
CHANGELOG.md
File diff suppressed because it is too large
@ -42,9 +42,7 @@ Please consider what kind of change it is:

* For a **Major Feature**, first open an issue and outline your proposal so that it can be
discussed. This will also allow us to better coordinate our efforts, prevent duplication of work,
and help you to craft the change so that it is successfully accepted into the project. **Note**:
Adding a new topic to the documentation, or significantly re-writing a topic, counts as a major
feature.
and help you to craft the change so that it is successfully accepted into the project.
* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).

## <a name="submit"></a> Submission Guidelines
@ -238,7 +236,6 @@ There are currently a few exceptions to the "use package name" rule:
* **docs-infra**: used for docs-app (angular.io) related changes within the /aio directory of the
repo
* **dev-infra**: used for dev-infra related changes within the directories /scripts, /tools and /dev-infra
* **migrations**: used for changes to the `ng update` migrations.
* **ngcc**: used for changes to the [Angular Compatibility Compiler](./packages/compiler-cli/ngcc/README.md)
* **ve**: used for changes specific to ViewEngine (legacy compiler/renderer).
* none/empty string: useful for `style`, `test` and `refactor` changes that are done across all
15
WORKSPACE
15
WORKSPACE
@ -8,8 +8,8 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# Fetch rules_nodejs so we can install our npm dependencies
http_archive(
name = "build_bazel_rules_nodejs",
sha256 = "84abf7ac4234a70924628baa9a73a5a5cbad944c4358cf9abdb4aab29c9a5b77",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.7.0/rules_nodejs-1.7.0.tar.gz"],
sha256 = "2eca5b934dee47b5ff304f502ae187c40ec4e33e12bcbce872a2eeb786e23269",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.4.1/rules_nodejs-1.4.1.tar.gz"],
)

# Check the rules_nodejs version and download npm dependencies
@ -17,7 +17,7 @@ http_archive(
# assert on that.
load("@build_bazel_rules_nodejs//:index.bzl", "check_rules_nodejs_version", "node_repositories", "yarn_install")

check_rules_nodejs_version(minimum_version_string = "1.7.0")
check_rules_nodejs_version(minimum_version_string = "1.4.1")

# Setup the Node.js toolchain
node_repositories(
@ -64,7 +64,7 @@ load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories"

web_test_repositories()

load("//dev-infra/browsers:browser_repositories.bzl", "browser_repositories")
load("//tools/browsers:browser_repositories.bzl", "browser_repositories")

browser_repositories()

@ -91,18 +91,17 @@ rbe_autoconfig(
# Need to specify a base container digest in order to ensure that we can use the checked-in
# platform configurations for the "ubuntu16_04" image. Otherwise the autoconfig rule would
# need to pull the image and run it in order determine the toolchain configuration. See:
# https://github.com/bazelbuild/bazel-toolchains/blob/3.2.0/configs/ubuntu16_04_clang/versions.bzl
base_container_digest = "sha256:5e750dd878df9fcf4e185c6f52b9826090f6e532b097f286913a428290622332",
# https://github.com/bazelbuild/bazel-toolchains/blob/1.1.2/configs/ubuntu16_04_clang/versions.bzl
base_container_digest = "sha256:1ab40405810effefa0b2f45824d6d608634ccddbf06366760c341ef6fbead011",
# Note that if you change the `digest`, you might also need to update the
# `base_container_digest` to make sure marketplace.gcr.io/google/rbe-ubuntu16-04-webtest:<digest>
# and marketplace.gcr.io/google/rbe-ubuntu16-04:<base_container_digest> have
# the same Clang and JDK installed. Clang is needed because of the dependency on
# @com_google_protobuf. Java is needed for the Bazel's test executor Java tool.
digest = "sha256:f743114235a43355bf8324e2ba0fa6a597236fe06f7bc99aaa9ac703631c306b",
digest = "sha256:0b8fa87db4b8e5366717a7164342a029d1348d2feea7ecc4b18c780bc2507059",
env = clang_env(),
registry = "marketplace.gcr.io",
# We can't use the default "ubuntu16_04" RBE image provided by the autoconfig because we need
# a specific Linux kernel that comes with "libx11" in order to run headless browser tests.
repository = "google/rbe-ubuntu16-04-webtest",
use_checked_in_confs = "Force",
)
@ -18,8 +18,8 @@ Here are the most important tasks you might need to use:

* `yarn build` - create a production build of the application (after installing dependencies, boilerplate, etc).
* `yarn build-local` - same as `build`, but use `setup-local` instead of `setup`.
* `yarn build-local-with-viewengine` - same as `build-local`, but in addition also turns on `ViewEngine` (pre-Ivy) mode in aio.
(Note: To turn on `ViewEngine` mode in docs examples, see `yarn boilerplate:add:viewengine` below.)
* `yarn build-local-with-viewengine` - same as `build-local`, but in addition also turns on `ViewEngine` mode in aio.
(Note: Docs examples run in `ViewEngine` mode by default. To turn on `ivy` mode in examples, see `yarn boilerplate:add` below.)

* `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
* `yarn serve-and-sync` - run both the `docs-watch` and `start` in the same console.
@ -34,7 +34,7 @@ Here are the most important tasks you might need to use:
* `yarn docs-test` - run the unit tests for the doc generation code.

* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally.
* `yarn boilerplate:add:viewengine` - same as `boilerplate:add` but also turns on `ViewEngine` (pre-Ivy) mode.
* `yarn boilerplate:add:ivy` - same as `boilerplate:add` but also turns on `ivy` mode.

* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
* `yarn generate-stackblitz` - generate the stackblitz files that are used by the `live-example` tags in the docs.
@ -44,7 +44,7 @@ Here are the most important tasks you might need to use:
- `--setup`: generate boilerplate, force webdriver update & other setup, then run tests.
- `--local`: run e2e tests with the local version of Angular contained in the "dist" folder.
_Requires `--setup` in order to take effect._
- `--viewengine`: run e2e tests in `ViewEngine` (pre-Ivy) mode.
- `--ivy`: run e2e tests in `ivy` mode.
- `--filter=foo`: limit e2e tests to those containing the word "foo".

> **Note for Windows users**
@ -1,5 +1,5 @@
# Image metadata and config
FROM debian:buster
FROM debian:stretch

LABEL name="angular.io PR preview" \
description="This image implements the PR preview functionality for angular.io." \
@ -37,9 +37,9 @@ ARG TEST_AIO_NGINX_PORT_HTTPS=4433
ARG AIO_SIGNIFICANT_FILES_PATTERN='^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)'
ARG TEST_AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN
ARG AIO_TRUSTED_PR_LABEL="aio: preview"
ARG TEST_AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL
ARG TEST_AIO_TRUSTED_PR_LABEL="aio: preview"
ARG AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME
ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
ARG AIO_ARTIFACT_MAX_SIZE=26214400
ARG TEST_AIO_ARTIFACT_MAX_SIZE=200
ARG AIO_PREVIEW_SERVER_PORT=3000
@ -72,29 +72,24 @@ RUN mkdir /var/log/aio


# Add extra package sources
RUN apt-get update -y && apt-get install -y curl=7.64.0-4+deb10u1
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_12.x | bash -
RUN apt-get update -y && apt-get install -y curl
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_10.x | bash -
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN echo "deb http://ftp.debian.org/debian stretch-backports main" | tee /etc/apt/sources.list.d/backports.list


# Install packages
# NOTE: Some packages (such as `nginx`, `nodejs`, `openssl`) make older versions unavailable on the
# repositories, so we cannot pin to specific versions for these packages :(
# See for example:
# - https://github.com/nodesource/distributions/issues/33
# - https://askubuntu.com/questions/715104/how-can-i-downgrade-openssl-via-apt-get
RUN apt-get update -y && apt-get install -y \
cron=3.0pl1-134+deb10u1 \
dnsmasq=2.80-1 \
nano=3.2-3 \
nginx \
nodejs \
openssl \
rsyslog=8.1901.0-1 \
vim=2:8.1.0875-5 \
yarn=1.22.4-1
RUN yarn global add pm2@4.4.0
cron=3.0pl1-128+deb9u1 \
dnsmasq=2.76-5+deb9u2 \
nano=2.7.4-1 \
nginx=1.10.3-1+deb9u2 \
nodejs=10.15.3-1nodesource1 \
openssl=1.1.0j-1~deb9u1 \
rsyslog=8.24.0-1 \
yarn=1.15.2-1
RUN yarn global add pm2@3.5.0


# Set up log rotation
@ -167,7 +162,8 @@ RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \

# Set up the Node.js scripts
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
RUN yarn --cwd="$AIO_SCRIPTS_JS_DIR/" install --production --frozen-lockfile
WORKDIR $AIO_SCRIPTS_JS_DIR/
RUN yarn install --production --frozen-lockfile


# Set up health check
@ -35,7 +35,6 @@ export class BuildCleaner {
]);
} catch (error) {
this.logger.error('ERROR:', error);
throw error;
}
}

@ -1,4 +1,5 @@
import * as express from 'express';
import {promisify} from 'util';
import {PreviewServerError} from './preview-error';

/**
@ -12,7 +13,7 @@ export async function respondWithError(res: express.Response, err: any): Promise
}

res.status(err.status);
return new Promise(resolve => res.end(err.message, resolve));
await promisify(res.end.bind(res))(err.message);
}

/**
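The `respondWithError` hunk above is the same await written two ways: a hand-rolled `new Promise` around `res.end`, versus `util.promisify` building that wrapper (with `bind` so `this` stays on the response). A minimal sketch of the equivalence against a made-up callback API (`writeLog` is hypothetical; only the pattern is taken from the diff):

```ts
import {promisify} from 'util';

// Hypothetical callback-style API, used only to show the two awaiting patterns.
declare function writeLog(message: string, done: (err?: Error) => void): void;

async function demo(message: string): Promise<void> {
  // Manual wrapper: resolve/reject from the callback yourself.
  await new Promise<void>((resolve, reject) =>
    writeLog(message, err => (err ? reject(err) : resolve())));

  // util.promisify builds the same wrapper from the callback signature.
  await promisify(writeLog)(message);
}
```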
@ -93,7 +93,7 @@ class Helper {
return fs.readFileSync(absFilePath, 'utf8');
}

public runCmd(cmd: string, opts: cp.ExecOptions = {}): Promise<CmdResult> {
public runCmd(cmd: string, opts: cp.ExecFileOptions = {}): Promise<CmdResult> {
return new Promise(resolve => {
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
this.createCleanUpFn(() => proc.kill());
@ -101,7 +101,7 @@ class Helper {
}

public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory): void {
Object.entries(this.portPerScheme).forEach(([scheme, port]) => suiteFactory(scheme, port));
Object.keys(this.portPerScheme).forEach(scheme => suiteFactory(scheme, this.portPerScheme[scheme]));
}

public verifyResponse(status: number, regex: string | RegExp = /^/): VerifyCmdResultFn {
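The `runForAllSupportedSchemes` change above is the usual `Object.keys` versus `Object.entries` trade: index back into the object on every iteration, or destructure the `[key, value]` pairs directly (which needs the ES2017 lib). A tiny sketch with an assumed `portPerScheme` shape (the values here are made up):

```ts
const portPerScheme: Record<string, number> = {http: 8080, https: 4433};

// Object.keys: the value has to be looked up on each pass.
Object.keys(portPerScheme).forEach(scheme => console.log(scheme, portPerScheme[scheme]));

// Object.entries: destructure the [key, value] pair in the callback signature.
Object.entries(portPerScheme).forEach(([scheme, port]) => console.log(scheme, port));
```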
@ -15,7 +15,7 @@ describe(`nginx`, () => {
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
it('should redirect HTTP to HTTPS', async () => {
|
||||
it('should redirect HTTP to HTTPS', done => {
|
||||
const httpHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTP}`;
|
||||
const httpsHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTPS}`;
|
||||
const urlMap = {
|
||||
@ -24,15 +24,16 @@ describe(`nginx`, () => {
|
||||
[`http://foo.${httpHost}/`]: `https://foo.${httpsHost}/`,
|
||||
};
|
||||
|
||||
const verifyRedirection = async (fromUrl: string, toUrl: string) => {
|
||||
const result = await h.runCmd(`curl -i ${fromUrl}`);
|
||||
const verifyRedirection = (httpUrl: string) => h.runCmd(`curl -i ${httpUrl}`).then(result => {
|
||||
h.verifyResponse(307)(result);
|
||||
|
||||
const headers = result.stdout.split(/(?:\r?\n){2,}/)[0];
|
||||
expect(headers).toContain(`Location: ${toUrl}`);
|
||||
};
|
||||
expect(headers).toContain(`Location: ${urlMap[httpUrl]}`);
|
||||
});
|
||||
|
||||
await Promise.all(Object.entries(urlMap).map(urls => verifyRedirection(...urls)));
|
||||
Promise.
|
||||
all(Object.keys(urlMap).map(verifyRedirection)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
@ -61,15 +62,15 @@ describe(`nginx`, () => {
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html', async () => {
|
||||
it('should return /index.html', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
@ -89,11 +90,12 @@ describe(`nginx`, () => {
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js', async () => {
|
||||
it('should return /foo/bar.js', done => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex));
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
@ -109,46 +111,47 @@ describe(`nginx`, () => {
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 403 for directories', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 403 for directories', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/`).then(h.verifyResponse(403)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo`).then(h.verifyResponse(403)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths to files', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404));
|
||||
it('should respond with 404 for unknown paths to files', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', async () => {
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown PRs/SHAs', async () => {
|
||||
it('should respond with 404 for unknown PRs/SHAs', done => {
|
||||
const otherPr = 54321;
|
||||
const otherShortSha = computeShortSha('8'.repeat(40));
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}9.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherShortSha}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the subdomain format is wrong', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 404 if the subdomain format is wrong', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://prx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://xx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
@ -157,25 +160,26 @@ describe(`nginx`, () => {
|
||||
h.runCmd(`curl -iL ${scheme}://${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}_${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404));
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', async () => {
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const bodyRegex9 = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
const bodyRegex0 = new RegExp(`^PR: ${pr} | SHA: ${sha0} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -227,23 +231,23 @@ describe(`nginx`, () => {
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -287,28 +291,29 @@ describe(`nginx`, () => {
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
it('should disallow non-POST requests', done => {
|
||||
const url = `${scheme}://${host}/circle-build`;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
|
||||
then(h.verifyResponse(400, /Incorrect body content. Expected JSON/));
|
||||
it('should pass requests through to the preview server', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
|
||||
then(h.verifyResponse(400, /Incorrect body content. Expected JSON/)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncircle-build/`).then(h.verifyResponse(404)),
|
||||
@ -317,7 +322,7 @@ describe(`nginx`, () => {
|
||||
h.runCmd(`${cmdPrefix}/circle-buildnfoo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/pr`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/42`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -327,33 +332,38 @@ describe(`nginx`, () => {
|
||||
const url = `${scheme}://${host}/pr-updated`;
|
||||
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
await Promise.all([
|
||||
it('should disallow non-POST requests', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST --header "Content-Type: application/json" ${url}`).
|
||||
then(h.verifyResponse(400, /Missing or empty 'number' field/));
|
||||
it('should pass requests through to the preview server', done => {
|
||||
const cmdPrefix = `curl -iLX POST --header "Content-Type: application/json"`;
|
||||
|
||||
const cmd1 = `${cmdPrefix} ${url}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(cmd1).then(h.verifyResponse(400, /Missing or empty 'number' field/)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -364,7 +374,7 @@ describe(`nginx`, () => {
|
||||
beforeEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
return h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -105,8 +105,8 @@ describe('preview-server', () => {
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
const curl = makeCurl(`${host}/circle-build`);
|
||||
|
||||
const curl = makeCurl(`${host}/circle-build`);
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
@ -189,7 +189,8 @@ describe('preview-server', () => {
|
||||
});
|
||||
|
||||
it('should respond with 201 if a new public build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).then(h.verifyResponse(201));
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER))
|
||||
.then(h.verifyResponse(201));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER }).toExistAsABuild();
|
||||
});
|
||||
|
||||
@ -198,7 +199,7 @@ describe('preview-server', () => {
|
||||
expect({ prNum: PrNums.TRUST_CHECK_UNTRUSTED, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
[true, false].forEach(isPublic => {
|
||||
[true].forEach(isPublic => {
|
||||
const build = isPublic ? BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const prNum = isPublic ? PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
const label = isPublic ? 'public' : 'non-public';
|
||||
@ -363,23 +364,23 @@ describe('preview-server', () => {
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -425,18 +426,18 @@ describe('preview-server', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const mockPayload = JSON.stringify({number: 1}); // MockExternalApiFlags.TRUST_CHECK_ACTIVE_TRUSTED_USER });
|
||||
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" ${host}`;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
@ -550,10 +551,10 @@ describe('preview-server', () => {
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
it('should respond with 404 for requests to unknown URLs', async () => {
|
||||
it('should respond with 404 for requests to unknown URLs', done => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
@ -561,7 +562,7 @@ describe('preview-server', () => {
|
||||
h.runCmd(`curl -iLX POST ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
@ -14,41 +14,42 @@
|
||||
"predev": "yarn build || true",
|
||||
"dev": "run-p ~~build-watch ~~test-watch",
|
||||
"lint": "tslint --project tsconfig.json",
|
||||
"pretest": "run-s build lint",
|
||||
"pretest": "yarn build",
|
||||
"test": "yarn ~~test-only",
|
||||
"pretest-watch": "yarn pretest",
|
||||
"test-watch": "yarn ~~test-watch",
|
||||
"~~build": "tsc",
|
||||
"~~build-watch": "yarn ~~build --watch",
|
||||
"pre~~test-only": "yarn lint",
|
||||
"~~test-only": "node dist/test",
|
||||
"~~test-watch": "nodemon --delay 1 --exec \"yarn ~~test-only\" --watch dist"
|
||||
},
|
||||
"dependencies": {
|
||||
"body-parser": "^1.19.0",
|
||||
"delete-empty": "^3.0.0",
|
||||
"express": "^4.17.1",
|
||||
"jasmine": "^3.5.0",
|
||||
"nock": "^12.0.3",
|
||||
"node-fetch": "^2.6.0",
|
||||
"shelljs": "^0.8.4",
|
||||
"source-map-support": "^0.5.19",
|
||||
"tar-stream": "^2.1.2",
|
||||
"tslib": "^1.11.1"
|
||||
"body-parser": "^1.18.3",
|
||||
"delete-empty": "^2.0.0",
|
||||
"express": "^4.16.3",
|
||||
"jasmine": "^3.2.0",
|
||||
"nock": "^9.6.1",
|
||||
"node-fetch": "^2.2.0",
|
||||
"shelljs": "^0.8.2",
|
||||
"source-map-support": "^0.5.9",
|
||||
"tar-stream": "^1.6.1",
|
||||
"tslib": "^1.10.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/body-parser": "^1.19.0",
|
||||
"@types/express": "^4.17.6",
|
||||
"@types/jasmine": "^3.5.10",
|
||||
"@types/nock": "^11.1.0",
|
||||
"@types/node": "^13.13.2",
|
||||
"@types/node-fetch": "^2.5.7",
|
||||
"@types/shelljs": "^0.8.7",
|
||||
"@types/supertest": "^2.0.8",
|
||||
"nodemon": "^2.0.3",
|
||||
"@types/body-parser": "^1.17.0",
|
||||
"@types/express": "^4.16.0",
|
||||
"@types/jasmine": "^2.8.8",
|
||||
"@types/nock": "^9.3.0",
|
||||
"@types/node": "^10.9.2",
|
||||
"@types/node-fetch": "^2.1.2",
|
||||
"@types/shelljs": "^0.8.0",
|
||||
"@types/supertest": "^2.0.5",
|
||||
"nodemon": "^1.18.3",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"supertest": "^4.0.2",
|
||||
"tslint": "^6.1.1",
|
||||
"supertest": "^3.1.0",
|
||||
"tslint": "^5.11.0",
|
||||
"tslint-jasmine-noSkipOrFocus": "^1.0.9",
|
||||
"typescript": "^3.8.3"
|
||||
"typescript": "^3.0.1"
|
||||
}
|
||||
}
|
||||
|
@ -15,6 +15,7 @@ const EXISTING_DOWNLOADS = [
|
||||
'20-1234567-build.zip',
|
||||
];
|
||||
const OPEN_PRS = [10, 40];
|
||||
const ANY_DATE = jasmine.any(String);
|
||||
|
||||
// Tests
|
||||
describe('BuildCleaner', () => {
|
||||
@ -76,18 +77,22 @@ describe('BuildCleaner', () => {
|
let cleanerRemoveUnnecessaryDownloadsSpy: jasmine.Spy;

beforeEach(() => {
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner, 'getExistingBuildNumbers').and.resolveTo(EXISTING_BUILDS);
cleanerGetOpenPrNumbersSpy = spyOn(cleaner, 'getOpenPrNumbers').and.resolveTo(OPEN_PRS);
cleanerGetExistingDownloadsSpy = spyOn(cleaner, 'getExistingDownloads').and.resolveTo(EXISTING_DOWNLOADS);
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner, 'getExistingBuildNumbers')
.and.callFake(() => Promise.resolve(EXISTING_BUILDS));
cleanerGetOpenPrNumbersSpy = spyOn(cleaner, 'getOpenPrNumbers')
.and.callFake(() => Promise.resolve(OPEN_PRS));
cleanerGetExistingDownloadsSpy = spyOn(cleaner, 'getExistingDownloads')
.and.callFake(() => Promise.resolve(EXISTING_DOWNLOADS));

cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner, 'removeUnnecessaryBuilds');
cleanerRemoveUnnecessaryDownloadsSpy = spyOn(cleaner, 'removeUnnecessaryDownloads');

});


it('should return a promise', async () => {
const promise = cleaner.cleanUp();
expect(promise).toBeInstanceOf(Promise);
expect(promise).toEqual(jasmine.any(Promise));

// Do not complete the test and release the spies synchronously, to avoid running the actual implementations.
await promise;
|
||||
|
||||
|
||||
it('should reject if \'getOpenPrNumbers()\' rejects', async () => {
|
||||
cleanerGetOpenPrNumbersSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
try {
|
||||
cleanerGetOpenPrNumbersSpy.and.callFake(() => Promise.reject('Test'));
|
||||
await cleaner.cleanUp();
|
||||
} catch (err) {
|
||||
expect(err).toBe('Test');
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getExistingBuildNumbers()\' rejects', async () => {
|
||||
cleanerGetExistingBuildNumbersSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
try {
|
||||
cleanerGetExistingBuildNumbersSpy.and.callFake(() => Promise.reject('Test'));
|
||||
await cleaner.cleanUp();
|
||||
} catch (err) {
|
||||
expect(err).toBe('Test');
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getExistingDownloads()\' rejects', async () => {
|
||||
cleanerGetExistingDownloadsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
try {
|
||||
cleanerGetExistingDownloadsSpy.and.callFake(() => Promise.reject('Test'));
|
||||
await cleaner.cleanUp();
|
||||
} catch (err) {
|
||||
expect(err).toBe('Test');
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'removeUnnecessaryBuilds()\' rejects', async () => {
|
||||
cleanerRemoveUnnecessaryBuildsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
try {
|
||||
cleanerRemoveUnnecessaryBuildsSpy.and.callFake(() => Promise.reject('Test'));
|
||||
await cleaner.cleanUp();
|
||||
} catch (err) {
|
||||
expect(err).toBe('Test');
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'removeUnnecessaryDownloads()\' rejects', async () => {
|
||||
cleanerRemoveUnnecessaryDownloadsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
try {
|
||||
cleanerRemoveUnnecessaryDownloadsSpy.and.callFake(() => Promise.reject('Test'));
|
||||
await cleaner.cleanUp();
|
||||
} catch (err) {
|
||||
expect(err).toBe('Test');
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
@ -162,15 +187,13 @@ describe('BuildCleaner', () => {
|
||||
let promise: Promise<number[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake(
|
||||
((_: string, cb: typeof readdirCb) => readdirCb = cb) as unknown as typeof fs.readdir,
|
||||
);
|
||||
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
|
||||
promise = cleaner.getExistingBuildNumbers();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@ -180,27 +203,43 @@ describe('BuildCleaner', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while getting the files', async () => {
|
||||
it('should reject if an error occurs while getting the files', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb('Test');
|
||||
await expectAsync(promise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned files (as numbers)', async () => {
|
||||
it('should resolve with the returned files (as numbers)', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual([12, 34, 56]);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, ['12', '34', '56']);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
|
||||
it('should remove `HIDDEN_DIR_PREFIX` from the filenames', async () => {
|
||||
it('should remove `HIDDEN_DIR_PREFIX` from the filenames', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual([12, 34, 56]);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, [`${HIDDEN_DIR_PREFIX}12`, '34', `${HIDDEN_DIR_PREFIX}56`]);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
|
||||
it('should ignore files with non-numeric (or zero) names', async () => {
|
||||
it('should ignore files with non-numeric (or zero) names', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual([12, 34, 56]);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, ['12', 'foo', '34', 'bar', '56', '000']);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
});
|
||||
@ -220,7 +259,7 @@ describe('BuildCleaner', () => {
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@ -229,15 +268,31 @@ describe('BuildCleaner', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while fetching PRs', async () => {
|
||||
it('should reject if an error occurs while fetching PRs', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
prDeferred.reject('Test');
|
||||
await expectAsync(promise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the numbers of the fetched PRs', async () => {
|
||||
it('should resolve with the numbers of the fetched PRs', done => {
|
||||
promise.then(prNumbers => {
|
||||
expect(prNumbers).toEqual([1, 2, 3]);
|
||||
done();
|
||||
});
|
||||
|
||||
prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]);
|
||||
await expectAsync(promise).toBeResolvedTo([1, 2, 3]);
|
||||
});
|
||||
|
||||
|
||||
it('should log the number of open PRs', () => {
|
||||
promise.then(prNumbers => {
|
||||
expect(loggerLogSpy).toHaveBeenCalledWith(
|
||||
ANY_DATE, 'BuildCleaner: ', `Open pull requests: ${prNumbers}`);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -249,15 +304,13 @@ describe('BuildCleaner', () => {
|
||||
let promise: Promise<string[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake(
|
||||
((_: string, cb: typeof readdirCb) => readdirCb = cb) as unknown as typeof fs.readdir,
|
||||
);
|
||||
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
|
||||
promise = cleaner.getExistingDownloads();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@@ -267,21 +320,33 @@ describe('BuildCleaner', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while getting the files', async () => {
|
||||
it('should reject if an error occurs while getting the files', done => {
|
||||
promise.catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb('Test');
|
||||
await expectAsync(promise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned file names', async () => {
|
||||
it('should resolve with the returned file names', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual(EXISTING_DOWNLOADS);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, EXISTING_DOWNLOADS);
|
||||
await expectAsync(promise).toBeResolvedTo(EXISTING_DOWNLOADS);
|
||||
});
|
||||
|
||||
|
||||
it('should ignore files that do not match the artifactPath', async () => {
|
||||
it('should ignore files that do not match the artifactPath', done => {
|
||||
promise.then(result => {
|
||||
expect(result).toEqual(['10-ABCDEF-build.zip', '30-FFFFFFF-build.zip']);
|
||||
done();
|
||||
});
|
||||
|
||||
readdirCb(null, ['10-ABCDEF-build.zip', '20-AAAAAAA-otherfile.zip', '30-FFFFFFF-build.zip']);
|
||||
await expectAsync(promise).toBeResolvedTo(['10-ABCDEF-build.zip', '30-FFFFFFF-build.zip']);
|
||||
});
|
||||
|
||||
});
|
||||
@@ -299,7 +364,7 @@ describe('BuildCleaner', () => {
});

it('should test if the directory exists (and return if it does not)', () => {
it('should test if the directory exists (and return if is does not)', () => {
shellTestSpy.and.returnValue(false);
cleaner.removeDir('/foo/bar');
@@ -316,19 +381,22 @@ describe('BuildCleaner', () => {
|
||||
|
||||
|
||||
it('should make the directory and its content writable before removing', () => {
|
||||
shellRmSpy.and.callFake(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar'));
|
||||
cleaner.removeDir('/foo/bar');
|
||||
|
||||
expect(shellChmodSpy).toHaveBeenCalledBefore(shellRmSpy);
|
||||
expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar');
|
||||
expect(shellRmSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should catch errors and log them', () => {
|
||||
shellRmSpy.and.throwError('Test');
|
||||
shellRmSpy.and.callFake(() => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
throw 'Test';
|
||||
});
|
||||
|
||||
cleaner.removeDir('/foo/bar');
|
||||
|
||||
expect(loggerErrorSpy).toHaveBeenCalledWith('ERROR: Unable to remove \'/foo/bar\' due to:', new Error('Test'));
|
||||
expect(loggerErrorSpy).toHaveBeenCalledWith('ERROR: Unable to remove \'/foo/bar\' due to:', 'Test');
|
||||
});
|
||||
|
||||
});
|
||||
@@ -381,7 +449,7 @@ describe('BuildCleaner', () => {
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0);
|
||||
cleanerRemoveDirSpy.calls.reset();
|
||||
|
||||
cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], []);
|
||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(8);
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
|
||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/2'));
@@ -45,15 +45,25 @@ describe('CircleCIApi', () => {
|
||||
const errorMessage = 'Invalid request';
|
||||
const request = nock(BASE_URL).get(`/${buildNum}?circle-token=${TOKEN}`);
|
||||
|
||||
request.replyWithError(errorMessage);
|
||||
await expectAsync(api.getBuildInfo(buildNum)).toBeRejectedWithError(
|
||||
try {
|
||||
request.replyWithError(errorMessage);
|
||||
await api.getBuildInfo(buildNum);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (err) {
|
||||
expect(err.message).toEqual(
|
||||
`CircleCI build info request failed ` +
|
||||
`(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
|
||||
}
|
||||
|
||||
request.reply(404, errorMessage);
|
||||
await expectAsync(api.getBuildInfo(buildNum)).toBeRejectedWithError(
|
||||
try {
|
||||
request.reply(404, errorMessage);
|
||||
await api.getBuildInfo(buildNum);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (err) {
|
||||
expect(err.message).toEqual(
|
||||
`CircleCI build info request failed ` +
|
||||
`(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -68,7 +78,8 @@ describe('CircleCIApi', () => {
|
||||
.get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
|
||||
.reply(200, [artifact0, artifact1, artifact2]);
|
||||
|
||||
await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeResolvedTo('https://url/1');
|
||||
const artifactUrl = await api.getBuildArtifactUrl(buildNum, 'some/path/1');
|
||||
expect(artifactUrl).toEqual('https://url/1');
|
||||
request.done();
|
||||
});
|
||||
|
||||
@@ -79,15 +90,25 @@ describe('CircleCIApi', () => {
|
||||
const errorMessage = 'Invalid request';
|
||||
const request = nock(BASE_URL).get(`/${buildNum}/artifacts?circle-token=${TOKEN}`);
|
||||
|
||||
request.replyWithError(errorMessage);
|
||||
await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeRejectedWithError(
|
||||
try {
|
||||
request.replyWithError(errorMessage);
|
||||
await api.getBuildArtifactUrl(buildNum, 'some/path/1');
|
||||
throw new Error('Exception Expected');
|
||||
} catch (err) {
|
||||
expect(err.message).toEqual(
|
||||
`CircleCI artifact URL request failed ` +
|
||||
`(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
|
||||
}
|
||||
|
||||
request.reply(404, errorMessage);
|
||||
await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeRejectedWithError(
|
||||
try {
|
||||
request.reply(404, errorMessage);
|
||||
await api.getBuildArtifactUrl(buildNum, 'some/path/1');
|
||||
throw new Error('Exception Expected');
|
||||
} catch (err) {
|
||||
expect(err.message).toEqual(
|
||||
`CircleCI artifact URL request failed ` +
|
||||
`(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
|
||||
}
|
||||
});
|
||||
|
||||
it('should throw an error if the response does not contain the specified artifact', async () => {
|
||||
@@ -100,9 +121,14 @@ describe('CircleCIApi', () => {
|
||||
.get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
|
||||
.reply(200, [artifact0, artifact1, artifact2]);
|
||||
|
||||
await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/3')).toBeRejectedWithError(
|
||||
try {
|
||||
await api.getBuildArtifactUrl(buildNum, 'some/path/3');
|
||||
throw new Error('Exception Expected');
|
||||
} catch (err) {
|
||||
expect(err.message).toEqual(
|
||||
`CircleCI artifact URL request failed ` +
|
||||
`(Missing artifact (some/path/3) for CircleCI build: ${buildNum})`);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
@@ -118,7 +118,7 @@ describe('GithubApi', () => {
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((api as any).getPaginated()).toBeInstanceOf(Promise);
|
||||
expect((api as any).getPaginated()).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@@ -131,30 +131,45 @@ describe('GithubApi', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should reject if the request fails', async () => {
|
||||
const responsePromise = (api as any).getPaginated('/foo/bar');
|
||||
it('should reject if the request fails', done => {
|
||||
(api as any).getPaginated('/foo/bar').catch((err: any) => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].reject('Test');
|
||||
|
||||
await expectAsync(responsePromise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned items', async () => {
|
||||
it('should resolve with the returned items', done => {
|
||||
const items = [{id: 1}, {id: 2}];
|
||||
const responsePromise = (api as any).getPaginated('/foo/bar');
|
||||
deferreds[0].resolve(items);
|
||||
|
||||
await expectAsync(responsePromise).toBeResolvedTo(items);
|
||||
(api as any).getPaginated('/foo/bar').then((data: any) => {
|
||||
expect(data).toEqual(items);
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].resolve(items);
|
||||
});
|
||||
|
||||
|
||||
it('should iteratively call \'get()\' to fetch all items', async () => {
it('should iteratively call \'get()\' to fetch all items', done => {
// Create an array of 250 objects.
const allItems = new Array(250).fill(null).map((_, i) => ({id: i}));
const allItems = '.'.repeat(250).split('').map((_, i) => ({id: i}));
const apiGetSpy = api.get as jasmine.Spy;
const paramsForPage = (page: number) => ({baz: 'qux', page, per_page: 100});
const responsePromise = (api as any).getPaginated('/foo/bar', {baz: 'qux'});
|
||||
(api as any).getPaginated('/foo/bar', {baz: 'qux'}).then((data: any) => {
|
||||
const paramsForPage = (page: number) => ({baz: 'qux', page, per_page: 100});
|
||||
|
||||
expect(apiGetSpy).toHaveBeenCalledTimes(3);
|
||||
expect(apiGetSpy.calls.argsFor(0)).toEqual(['/foo/bar', paramsForPage(1)]);
|
||||
expect(apiGetSpy.calls.argsFor(1)).toEqual(['/foo/bar', paramsForPage(2)]);
|
||||
expect(apiGetSpy.calls.argsFor(2)).toEqual(['/foo/bar', paramsForPage(3)]);
|
||||
|
||||
expect(data).toEqual(allItems);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
deferreds[0].resolve(allItems.slice(0, 100));
|
||||
setTimeout(() => {
|
||||
@@ -163,13 +178,6 @@ describe('GithubApi', () => {
|
||||
deferreds[2].resolve(allItems.slice(200));
|
||||
}, 0);
|
||||
}, 0);
|
||||
|
||||
await expectAsync(responsePromise).toBeResolvedTo(allItems);
|
||||
|
||||
expect(apiGetSpy).toHaveBeenCalledTimes(3);
|
||||
expect(apiGetSpy.calls.argsFor(0)).toEqual(['/foo/bar', paramsForPage(1)]);
|
||||
expect(apiGetSpy.calls.argsFor(1)).toEqual(['/foo/bar', paramsForPage(2)]);
|
||||
expect(apiGetSpy.calls.argsFor(2)).toEqual(['/foo/bar', paramsForPage(3)]);
|
||||
});
|
||||
|
||||
});
|
||||
@@ -209,8 +217,8 @@ describe('GithubApi', () => {
|
||||
|
||||
describe('request()', () => {
|
||||
it('should return a promise', () => {
|
||||
nock('https://api.github.com').get('/').reply(200);
|
||||
expect((api as any).request()).toBeInstanceOf(Promise);
|
||||
nock('https://api.github.com').get('').reply(200);
|
||||
expect((api as any).request()).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@@ -239,8 +247,9 @@ describe('GithubApi', () => {
|
||||
nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.replyWithError('Test');
|
||||
|
||||
await expectAsync((api as any).request('method', '/path')).toBeRejectedWithError('Test');
|
||||
let message = 'Failed to reject error';
|
||||
await (api as any).request('method', '/path').catch((err: any) => message = err.message);
|
||||
expect(message).toEqual('Test');
|
||||
});
|
||||
|
||||
|
||||
@@ -254,69 +263,81 @@ describe('GithubApi', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should reject if response statusCode is <200', async () => {
|
||||
it('should reject if response statusCode is <200', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(199);
|
||||
const responsePromise = (api as any).request('method', '/path');
|
||||
|
||||
await expectAsync(responsePromise).toBeRejectedWith(jasmine.stringMatching('failed'));
|
||||
await expectAsync(responsePromise).toBeRejectedWith(jasmine.stringMatching('status: 199'));
|
||||
|
||||
(api as any).request('method', '/path')
|
||||
.catch((err: string) => {
|
||||
expect(err).toContain('failed');
|
||||
expect(err).toContain('status: 199');
|
||||
done();
|
||||
});
|
||||
requestHandler.done();
|
||||
});
|
||||
|
||||
|
||||
it('should reject if response statusCode is >=400', async () => {
|
||||
it('should reject if response statusCode is >=400', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(400);
|
||||
const responsePromise = (api as any).request('method', '/path');
|
||||
|
||||
await expectAsync(responsePromise).toBeRejectedWith(jasmine.stringMatching('failed'));
|
||||
await expectAsync(responsePromise).toBeRejectedWith(jasmine.stringMatching('status: 400'));
|
||||
|
||||
(api as any).request('method', '/path')
|
||||
.catch((err: string) => {
|
||||
expect(err).toContain('failed');
|
||||
expect(err).toContain('status: 400');
|
||||
done();
|
||||
});
|
||||
requestHandler.done();
|
||||
});
|
||||
|
||||
|
||||
it('should include the response text in the rejection message', async () => {
|
||||
it('should include the response text in the rejection message', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(500, 'Test');
|
||||
const responsePromise = (api as any).request('method', '/path');
|
||||
|
||||
await expectAsync(responsePromise).toBeRejectedWith(jasmine.stringMatching('Test'));
|
||||
|
||||
(api as any).request('method', '/path')
|
||||
.catch((err: string) => {
|
||||
expect(err).toContain('Test');
|
||||
done();
|
||||
});
|
||||
requestHandler.done();
|
||||
});
|
||||
|
||||
|
||||
it('should resolve if returned statusCode is >=200 and <400', async () => {
|
||||
it('should resolve if returned statusCode is >=200 and <400', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(200);
|
||||
|
||||
await expectAsync((api as any).request('method', '/path')).toBeResolved();
|
||||
(api as any).request('method', '/path').then(done);
|
||||
requestHandler.done();
|
||||
});
|
||||
|
||||
|
||||
it('should parse the response body into an object using \'JSON.parse\'', async () => {
|
||||
it('should parse the response body into an object using \'JSON.parse\'', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(300, '{"foo": "bar"}');
|
||||
|
||||
await expectAsync((api as any).request('method', '/path')).toBeResolvedTo({foo: 'bar'});
|
||||
(api as any).request('method', '/path').then((data: any) => {
|
||||
expect(data).toEqual({foo: 'bar'});
|
||||
done();
|
||||
});
|
||||
requestHandler.done();
|
||||
});
|
||||
|
||||
it('should reject if the response text is malformed JSON', async () => {
|
||||
it('should reject if the response text is malformed JSON', done => {
|
||||
const requestHandler = nock('https://api.github.com')
|
||||
.intercept('/path', 'method')
|
||||
.reply(300, '}');
|
||||
|
||||
await expectAsync((api as any).request('method', '/path')).toBeRejectedWithError(SyntaxError);
|
||||
(api as any).request('method', '/path').catch((err: any) => {
|
||||
expect(err).toEqual(jasmine.any(SyntaxError));
|
||||
done();
|
||||
});
|
||||
requestHandler.done();
|
||||
});
@@ -1,6 +1,6 @@
|
||||
// Imports
|
||||
import {GithubApi} from '../../lib/common/github-api';
|
||||
import {GithubPullRequests, PullRequest} from '../../lib/common/github-pull-requests';
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
|
||||
// Tests
|
||||
describe('GithubPullRequests', () => {
|
||||
@@ -47,21 +47,27 @@ describe('GithubPullRequests', () => {
|
||||
|
||||
|
||||
it('should make a POST request to Github with the correct pathname, params and data', () => {
|
||||
githubApi.post.and.resolveTo();
|
||||
githubApi.post.and.callFake(() => Promise.resolve());
|
||||
prs.addComment(42, 'body');
|
||||
expect(githubApi.post).toHaveBeenCalledWith('/repos/foo/bar/issues/42/comments', null, {body: 'body'});
|
||||
});
|
||||
|
||||
|
||||
it('should reject if the request fails', async () => {
|
||||
githubApi.post.and.rejectWith('Test');
|
||||
await expectAsync(prs.addComment(42, 'body')).toBeRejectedWith('Test');
|
||||
it('should reject if the request fails', done => {
|
||||
githubApi.post.and.callFake(() => Promise.reject('Test'));
|
||||
prs.addComment(42, 'body').catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the data from the Github POST', async () => {
|
||||
githubApi.post.and.resolveTo('Test');
|
||||
await expectAsync(prs.addComment(42, 'body')).toBeResolvedTo('Test');
|
||||
it('should resolve with the data from the Github POST', done => {
|
||||
githubApi.post.and.callFake(() => Promise.resolve('Test'));
|
||||
prs.addComment(42, 'body').then(data => {
|
||||
expect(data).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -81,11 +87,13 @@ describe('GithubPullRequests', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the data returned from GitHub', async () => {
|
||||
it('should resolve with the data returned from GitHub', done => {
|
||||
const expected: any = {number: 42};
|
||||
githubApi.get.and.resolveTo(expected);
|
||||
|
||||
await expectAsync(prs.fetch(42)).toBeResolvedTo(expected);
|
||||
githubApi.get.and.callFake(() => Promise.resolve(expected));
|
||||
prs.fetch(42).then(data => {
|
||||
expect(data).toEqual(expected);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -117,14 +125,9 @@ describe('GithubPullRequests', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should forward the value returned by \'getPaginated()\'', async () => {
|
||||
const mockPrs: PullRequest[] = [
|
||||
{number: 1, user: {login: 'foo'}, labels: []},
|
||||
{number: 2, user: {login: 'bar'}, labels: []},
|
||||
];
|
||||
|
||||
githubApi.getPaginated.and.resolveTo(mockPrs);
|
||||
expect(await prs.fetchAll()).toBe(mockPrs);
|
||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||
githubApi.getPaginated.and.returnValue('Test');
|
||||
expect(prs.fetchAll() as any).toBe('Test');
|
||||
});
|
||||
|
||||
});
|
||||
@@ -144,11 +147,13 @@ describe('GithubPullRequests', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the data returned from GitHub', async () => {
|
||||
it('should resolve with the data returned from GitHub', done => {
|
||||
const expected: any = [{sha: 'ABCDE', filename: 'a/b/c'}, {sha: '12345', filename: 'x/y/z'}];
|
||||
githubApi.getPaginated.and.resolveTo(expected);
|
||||
|
||||
await expectAsync(prs.fetchFiles(42)).toBeResolvedTo(expected);
|
||||
githubApi.getPaginated.and.callFake(() => Promise.resolve(expected));
|
||||
prs.fetchFiles(42).then(data => {
|
||||
expect(data).toEqual(expected);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
@@ -1,5 +1,5 @@
|
||||
import {GithubApi} from '../../lib/common/github-api';
|
||||
import {GithubTeams, Team} from '../../lib/common/github-teams';
|
||||
import {GithubTeams} from '../../lib/common/github-teams';
|
||||
|
||||
// Tests
|
||||
describe('GithubTeams', () => {
|
||||
@@ -16,7 +16,6 @@ describe('GithubTeams', () => {
|
||||
expect(() => new GithubTeams(githubApi, '')).
|
||||
toThrowError('Missing or empty required parameter \'githubOrg\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
@@ -34,14 +33,9 @@ describe('GithubTeams', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should forward the value returned by \'getPaginated()\'', async () => {
|
||||
const mockTeams: Team[] = [
|
||||
{id: 1, slug: 'foo'},
|
||||
{id: 2, slug: 'bar'},
|
||||
];
|
||||
|
||||
githubApi.getPaginated.and.resolveTo(mockTeams);
|
||||
expect(await teams.fetchAll()).toBe(mockTeams);
|
||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||
githubApi.getPaginated.and.returnValue('Test');
|
||||
expect(teams.fetchAll() as any).toBe('Test');
|
||||
});
|
||||
|
||||
});
|
||||
@@ -56,77 +50,100 @@ describe('GithubTeams', () => {
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
githubApi.get.and.resolveTo();
|
||||
githubApi.get.and.callFake(() => Promise.resolve());
|
||||
const promise = teams.isMemberById('user', [1]);
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if called with an empty array', async () => {
|
||||
await expectAsync(teams.isMemberById('user', [])).toBeResolvedTo(false);
|
||||
expect(githubApi.get).not.toHaveBeenCalled();
|
||||
it('should resolve with false if called with an empty array', done => {
|
||||
teams.isMemberById('user', []).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(githubApi.get).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'get()\' with the correct pathname', async () => {
|
||||
githubApi.get.and.resolveTo();
|
||||
await teams.isMemberById('user', [1]);
|
||||
|
||||
expect(githubApi.get).toHaveBeenCalledWith('/teams/1/memberships/user');
|
||||
it('should call \'get()\' with the correct pathname', done => {
|
||||
githubApi.get.and.callFake(() => Promise.resolve());
|
||||
teams.isMemberById('user', [1]).then(() => {
|
||||
expect(githubApi.get).toHaveBeenCalledWith('/teams/1/memberships/user');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'get()\' rejects', async () => {
|
||||
githubApi.get.and.rejectWith(null);
|
||||
|
||||
await expectAsync(teams.isMemberById('user', [1])).toBeResolvedTo(false);
|
||||
expect(githubApi.get).toHaveBeenCalled();
|
||||
it('should resolve with false if \'get()\' rejects', done => {
|
||||
githubApi.get.and.callFake(() => Promise.reject(null));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(githubApi.get).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if the membership is not active', async () => {
|
||||
githubApi.get.and.resolveTo({state: 'pending'});
|
||||
|
||||
await expectAsync(teams.isMemberById('user', [1])).toBeResolvedTo(false);
|
||||
expect(githubApi.get).toHaveBeenCalled();
|
||||
it('should resolve with false if the membership is not active', done => {
|
||||
githubApi.get.and.callFake(() => Promise.resolve({state: 'pending'}));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(githubApi.get).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with true if the membership is active', async () => {
|
||||
githubApi.get.and.resolveTo({state: 'active'});
|
||||
await expectAsync(teams.isMemberById('user', [1])).toBeResolvedTo(true);
|
||||
it('should resolve with true if the membership is active', done => {
|
||||
githubApi.get.and.callFake(() => Promise.resolve({state: 'active'}));
|
||||
teams.isMemberById('user', [1]).then(isMember => {
|
||||
expect(isMember).toBe(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should sequentially call \'get()\' until an active membership is found', async () => {
|
||||
githubApi.get.
|
||||
withArgs('/teams/1/memberships/user').and.resolveTo({state: 'pending'}).
|
||||
withArgs('/teams/2/memberships/user').and.rejectWith(null).
|
||||
withArgs('/teams/3/memberships/user').and.resolveTo({state: 'active'});
|
||||
it('should sequentially call \'get()\' until an active membership is found', done => {
|
||||
const trainedResponses: {[pathname: string]: Promise<{state: string}>} = {
|
||||
'/teams/1/memberships/user': Promise.resolve({state: 'pending'}),
|
||||
'/teams/2/memberships/user': Promise.reject(null),
|
||||
'/teams/3/memberships/user': Promise.resolve({state: 'active'}),
|
||||
};
|
||||
githubApi.get.and.callFake((pathname: string) => trainedResponses[pathname]);
|
||||
|
||||
await expectAsync(teams.isMemberById('user', [1, 2, 3, 4])).toBeResolvedTo(true);
|
||||
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
|
||||
expect(isMember).toBe(true);
|
||||
|
||||
expect(githubApi.get).toHaveBeenCalledTimes(3);
|
||||
expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
expect(githubApi.get).toHaveBeenCalledTimes(3);
|
||||
expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if no active membership is found', async () => {
|
||||
githubApi.get.
|
||||
withArgs('/teams/1/memberships/user').and.resolveTo({state: 'pending'}).
|
||||
withArgs('/teams/2/memberships/user').and.rejectWith(null).
|
||||
withArgs('/teams/3/memberships/user').and.resolveTo({state: 'not active'}).
|
||||
withArgs('/teams/4/memberships/user').and.rejectWith(null);
|
||||
it('should resolve with false if no active membership is found', done => {
|
||||
const trainedResponses: {[pathname: string]: Promise<{state: string}>} = {
|
||||
'/teams/1/memberships/user': Promise.resolve({state: 'pending'}),
|
||||
'/teams/2/memberships/user': Promise.reject(null),
|
||||
'/teams/3/memberships/user': Promise.resolve({state: 'not active'}),
|
||||
'/teams/4/memberships/user': Promise.reject(null),
|
||||
};
|
||||
githubApi.get.and.callFake((pathname: string) => trainedResponses[pathname]);
|
||||
|
||||
await expectAsync(teams.isMemberById('user', [1, 2, 3, 4])).toBeResolvedTo(false);
|
||||
teams.isMemberById('user', [1, 2, 3, 4]).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
|
||||
expect(githubApi.get).toHaveBeenCalledTimes(4);
|
||||
expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(3)[0]).toBe('/teams/4/memberships/user');
|
||||
expect(githubApi.get).toHaveBeenCalledTimes(4);
|
||||
expect(githubApi.get.calls.argsFor(0)[0]).toBe('/teams/1/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(1)[0]).toBe('/teams/2/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(2)[0]).toBe('/teams/3/memberships/user');
|
||||
expect(githubApi.get.calls.argsFor(3)[0]).toBe('/teams/4/memberships/user');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -140,13 +157,14 @@ describe('GithubTeams', () => {
|
||||
beforeEach(() => {
|
||||
teams = new GithubTeams(githubApi, 'foo');
|
||||
|
||||
teamsFetchAllSpy = spyOn(teams, 'fetchAll').and.resolveTo([{id: 1, slug: 'team1'}, {id: 2, slug: 'team2'}]);
|
||||
const mockResponse = Promise.resolve([{id: 1, slug: 'team1'}, {id: 2, slug: 'team2'}]);
|
||||
teamsFetchAllSpy = spyOn(teams, 'fetchAll').and.returnValue(mockResponse);
|
||||
teamsIsMemberByIdSpy = spyOn(teams, 'isMemberById');
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(teams.isMemberBySlug('user', ['team-slug'])).toBeInstanceOf(Promise);
|
||||
expect(teams.isMemberBySlug('user', ['team-slug'])).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@@ -156,46 +174,55 @@ describe('GithubTeams', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'fetchAll()\' rejects', async () => {
|
||||
teamsFetchAllSpy.and.rejectWith(null);
|
||||
await expectAsync(teams.isMemberBySlug('user', ['team-slug'])).toBeResolvedTo(false);
|
||||
it('should resolve with false if \'fetchAll()\' rejects', done => {
|
||||
teamsFetchAllSpy.and.callFake(() => Promise.reject(null));
|
||||
teams.isMemberBySlug('user', ['team-slug']).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'isMemberById()\' with the correct params if no team is found', async () => {
|
||||
await teams.isMemberBySlug('user', ['no-match']);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', []);
|
||||
it('should call \'isMemberById()\' with the correct params if no team is found', done => {
|
||||
teams.isMemberBySlug('user', ['no-match']).then(() => {
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should call \'isMemberById()\' with the correct params if teams are found', async () => {
|
||||
await teams.isMemberBySlug('userA', ['team1']);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('userA', [1]);
|
||||
it('should call \'isMemberById()\' with the correct params if teams are found', done => {
|
||||
const spy = teamsIsMemberByIdSpy;
|
||||
|
||||
await teams.isMemberBySlug('userB', ['team2']);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('userB', [2]);
|
||||
|
||||
await teams.isMemberBySlug('userC', ['team1', 'team2']);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('userC', [1, 2]);
|
||||
Promise.all([
|
||||
teams.isMemberBySlug('user', ['team1']).then(() => expect(spy).toHaveBeenCalledWith('user', [1])),
|
||||
teams.isMemberBySlug('user', ['team2']).then(() => expect(spy).toHaveBeenCalledWith('user', [2])),
|
||||
teams.isMemberBySlug('user', ['team1', 'team2']).then(() => expect(spy).toHaveBeenCalledWith('user', [1, 2])),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with false if \'isMemberById()\' rejects', async () => {
|
||||
teamsIsMemberByIdSpy.and.rejectWith(null);
|
||||
|
||||
await expectAsync(teams.isMemberBySlug('user', ['team1'])).toBeResolvedTo(false);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalled();
|
||||
it('should resolve with false if \'isMemberById()\' rejects', done => {
|
||||
teamsIsMemberByIdSpy.and.callFake(() => Promise.reject(null));
|
||||
teams.isMemberBySlug('user', ['team1']).then(isMember => {
|
||||
expect(isMember).toBe(false);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the value \'isMemberById()\' resolves with', async () => {
|
||||
teamsIsMemberByIdSpy.and.resolveTo(true);
|
||||
await expectAsync(teams.isMemberBySlug('userA', ['team1'])).toBeResolvedTo(true);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('userA', [1]);
|
||||
|
||||
teamsIsMemberByIdSpy.and.resolveTo(false);
|
||||
await expectAsync(teams.isMemberBySlug('userB', ['team1'])).toBeResolvedTo(false);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('userB', [1]);
|
||||
teamsIsMemberByIdSpy.and.callFake(() => Promise.resolve(true));
|
||||
const isMember1 = await teams.isMemberBySlug('user', ['team1']);
|
||||
expect(isMember1).toBe(true);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', [1]);
|
||||
|
||||
teamsIsMemberByIdSpy.and.callFake(() => Promise.resolve(false));
|
||||
const isMember2 = await teams.isMemberBySlug('user', ['team1']);
|
||||
expect(isMember2).toBe(false);
|
||||
expect(teamsIsMemberByIdSpy).toHaveBeenCalledWith('user', [1]);
|
||||
});
|
||||
|
||||
});
@@ -9,8 +9,7 @@ import {Logger} from '../../lib/common/utils';
|
||||
import {BuildCreator} from '../../lib/preview-server/build-creator';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/preview-server/build-events';
|
||||
import {PreviewServerError} from '../../lib/preview-server/preview-error';
|
||||
import {customAsyncMatchers} from './jasmine-custom-async-matchers';
|
||||
|
||||
import {expectToBePreviewServerError} from './helpers';
|
||||
|
||||
// Tests
|
||||
describe('BuildCreator', () => {
|
||||
@@ -25,7 +24,6 @@ describe('BuildCreator', () => {
|
||||
const publicShaDir = path.join(publicPrDir, shortSha);
|
||||
let bc: BuildCreator;
|
||||
|
||||
beforeEach(() => jasmine.addAsyncMatchers(customAsyncMatchers));
|
||||
beforeEach(() => bc = new BuildCreator(buildsDir));
|
||||
|
||||
|
||||
@@ -37,8 +35,8 @@ describe('BuildCreator', () => {
|
||||
|
||||
|
||||
it('should extend EventEmitter', () => {
|
||||
expect(bc).toBeInstanceOf(BuildCreator);
|
||||
expect(bc).toBeInstanceOf(EventEmitter);
|
||||
expect(bc).toEqual(jasmine.any(BuildCreator));
|
||||
expect(bc).toEqual(jasmine.any(EventEmitter));
|
||||
|
||||
expect(Object.getPrototypeOf(bc)).toBe(BuildCreator.prototype);
|
||||
});
|
||||
@@ -69,43 +67,47 @@ describe('BuildCreator', () => {
|
||||
const shaDir = isPublic ? publicShaDir : hiddenShaDir;
|
||||
|
||||
|
||||
it('should return a promise', async () => {
|
||||
it('should return a promise', done => {
|
||||
const promise = bc.create(pr, sha, archive, isPublic);
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `extractArchive()`.
|
||||
|
||||
// Do not complete the test (and release the spies) synchronously to avoid running the actual
|
||||
// `extractArchive()`.
|
||||
await promise;
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility first if necessary', async () => {
|
||||
await bc.create(pr, sha, archive, isPublic);
|
||||
it('should update the PR\'s visibility first if necessary', done => {
|
||||
bcUpdatePrVisibilitySpy.and.callFake(() => expect(shellMkdirSpy).not.toHaveBeenCalled());
|
||||
|
||||
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledBefore(shellMkdirSpy);
|
||||
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(pr, isPublic);
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
bc.create(pr, sha, archive, isPublic).
|
||||
then(() => {
|
||||
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(pr, isPublic);
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should create the build directory (and any parent directories)', async () => {
|
||||
await bc.create(pr, sha, archive, isPublic);
|
||||
expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir);
|
||||
it('should create the build directory (and any parent directories)', done => {
|
||||
bc.create(pr, sha, archive, isPublic).
|
||||
then(() => expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should extract the archive contents into the build directory', async () => {
|
||||
await bc.create(pr, sha, archive, isPublic);
|
||||
expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir);
|
||||
it('should extract the archive contents into the build directory', done => {
|
||||
bc.create(pr, sha, archive, isPublic).
|
||||
then(() => expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should emit a CreatedBuildEvent on success', async () => {
|
||||
it('should emit a CreatedBuildEvent on success', done => {
|
||||
let emitted = false;
|
||||
|
||||
bcEmitSpy.and.callFake((type: string, evt: CreatedBuildEvent) => {
|
||||
expect(type).toBe(CreatedBuildEvent.type);
|
||||
expect(evt).toBeInstanceOf(CreatedBuildEvent);
|
||||
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
|
||||
expect(evt.pr).toBe(+pr);
|
||||
expect(evt.sha).toBe(shortSha);
|
||||
expect(evt.isPublic).toBe(isPublic);
|
||||
@@ -113,108 +115,130 @@ describe('BuildCreator', () => {
|
||||
emitted = true;
|
||||
});
|
||||
|
||||
await bc.create(pr, sha, archive, isPublic);
|
||||
expect(emitted).toBe(true);
|
||||
bc.create(pr, sha, archive, isPublic).
|
||||
then(() => expect(emitted).toBe(true)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('on error', () => {
|
||||
let existsValues: {[dir: string]: boolean};
|
||||
|
||||
beforeEach(() => {
|
||||
bcExistsSpy.and.returnValue(false);
|
||||
existsValues = {
|
||||
[prDir]: false,
|
||||
[shaDir]: false,
|
||||
};
|
||||
|
||||
bcExistsSpy.and.callFake((dir: string) => existsValues[dir]);
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if changing the PR\'s visibility fails', async () => {
|
||||
it('should abort and skip further operations if changing the PR\'s visibility fails', done => {
|
||||
const mockError = new PreviewServerError(543, 'Test');
|
||||
bcUpdatePrVisibilitySpy.and.rejectWith(mockError);
|
||||
bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject(mockError));
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejectedWith(mockError);
|
||||
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||
expect(err).toBe(mockError);
|
||||
|
||||
expect(bcExistsSpy).not.toHaveBeenCalled();
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
expect(bcExistsSpy).not.toHaveBeenCalled();
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if the build does already exist', async () => {
|
||||
bcExistsSpy.withArgs(shaDir).and.returnValue(true);
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejectedWithPreviewServerError(
|
||||
409, `Request to overwrite existing ${isPublic ? '' : 'non-'}public directory: ${shaDir}`);
|
||||
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
it('should abort and skip further operations if the build does already exist', done => {
|
||||
existsValues[shaDir] = true;
|
||||
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||
const publicOrNot = isPublic ? 'public' : 'non-public';
|
||||
expectToBePreviewServerError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should detect existing build directory after visibility change', async () => {
|
||||
bcUpdatePrVisibilitySpy.and.callFake(() => bcExistsSpy.and.returnValue(true));
|
||||
it('should detect existing build directory after visibility change', done => {
|
||||
bcUpdatePrVisibilitySpy.and.callFake(() => existsValues[prDir] = existsValues[shaDir] = true);
|
||||
|
||||
expect(bcExistsSpy(prDir)).toBe(false);
|
||||
expect(bcExistsSpy(shaDir)).toBe(false);
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejectedWithPreviewServerError(
|
||||
409, `Request to overwrite existing ${isPublic ? '' : 'non-'}public directory: ${shaDir}`);
|
||||
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||
const publicOrNot = isPublic ? 'public' : 'non-public';
|
||||
expectToBePreviewServerError(err, 409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to create the directories', async () => {
|
||||
it('should abort and skip further operations if it fails to create the directories', done => {
|
||||
shellMkdirSpy.and.throwError('');
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejected();
|
||||
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to extract the archive', async () => {
|
||||
it('should abort and skip further operations if it fails to extract the archive', done => {
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should delete the PR directory (for new PR)', done => {
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should delete the SHA directory (for existing PR)', done => {
|
||||
existsValues[prDir] = true;
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejected();
|
||||
|
||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||
expect(bcExtractArchiveSpy).toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should delete the PR directory (for new PR)', async () => {
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejected();
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
|
||||
});
|
||||
|
||||
|
||||
it('should delete the SHA directory (for existing PR)', async () => {
|
||||
bcExistsSpy.withArgs(prDir).and.returnValue(true);
|
||||
bcExtractArchiveSpy.and.throwError('');
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejected();
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
|
||||
});
|
||||
|
||||
|
||||
it('should reject with an PreviewServerError', async () => {
|
||||
it('should reject with an PreviewServerError', done => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
shellMkdirSpy.and.callFake(() => { throw 'Test'; });
|
||||
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejectedWithPreviewServerError(
|
||||
500, `Error while creating preview at: ${shaDir}\nTest`);
|
||||
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||
expectToBePreviewServerError(err, 500, `Error while creating preview at: ${shaDir}\nTest`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should pass PreviewServerError instances unmodified', async () => {
|
||||
it('should pass PreviewServerError instances unmodified', done => {
|
||||
shellMkdirSpy.and.callFake(() => { throw new PreviewServerError(543, 'Test'); });
|
||||
await expectAsync(bc.create(pr, sha, archive, isPublic)).toBeRejectedWithPreviewServerError(543, 'Test');
|
||||
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||
expectToBePreviewServerError(err, 543, 'Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -241,12 +265,12 @@ describe('BuildCreator', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', async () => {
|
||||
it('should return a promise', done => {
|
||||
const promise = bc.updatePrVisibility(pr, true);
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `extractArchive()`.
|
||||
|
||||
// Do not complete the test (and release the spies) synchronously to avoid running the actual `extractArchive()`.
|
||||
await promise;
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@@ -255,53 +279,58 @@ describe('BuildCreator', () => {
|
||||
const newPrDir = makePublic ? publicPrDir : hiddenPrDir;
|
||||
|
||||
|
||||
it('should rename the directory', async () => {
|
||||
await bc.updatePrVisibility(pr, makePublic);
|
||||
expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir);
|
||||
it('should rename the directory', done => {
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(() => expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('when the visibility is updated', () => {
|
||||
|
||||
it('should resolve to true', async () => {
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeResolvedTo(true);
|
||||
it('should resolve to true', done => {
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(result => expect(result).toBe(true)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should rename the directory', async () => {
|
||||
await bc.updatePrVisibility(pr, makePublic);
|
||||
expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir);
|
||||
it('should rename the directory', done => {
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(() => expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should emit a ChangedPrVisibilityEvent on success', async () => {
|
||||
it('should emit a ChangedPrVisibilityEvent on success', done => {
|
||||
let emitted = false;
|
||||
|
||||
bcEmitSpy.and.callFake((type: string, evt: ChangedPrVisibilityEvent) => {
|
||||
expect(type).toBe(ChangedPrVisibilityEvent.type);
|
||||
expect(evt).toBeInstanceOf(ChangedPrVisibilityEvent);
|
||||
expect(evt).toEqual(jasmine.any(ChangedPrVisibilityEvent));
|
||||
expect(evt.pr).toBe(+pr);
|
||||
expect(evt.shas).toBeInstanceOf(Array);
|
||||
expect(evt.shas).toEqual(jasmine.any(Array));
|
||||
expect(evt.isPublic).toBe(makePublic);
|
||||
|
||||
emitted = true;
|
||||
});
|
||||
|
||||
await bc.updatePrVisibility(pr, makePublic);
|
||||
expect(emitted).toBe(true);
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(() => expect(emitted).toBe(true)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should include all shas in the emitted event', async () => {
|
||||
it('should include all shas in the emitted event', done => {
|
||||
const shas = ['foo', 'bar', 'baz'];
|
||||
let emitted = false;
|
||||
|
||||
bcListShasByDate.and.resolveTo(shas);
|
||||
bcListShasByDate.and.callFake(() => Promise.resolve(shas));
|
||||
bcEmitSpy.and.callFake((type: string, evt: ChangedPrVisibilityEvent) => {
|
||||
expect(bcListShasByDate).toHaveBeenCalledWith(newPrDir);
|
||||
|
||||
expect(type).toBe(ChangedPrVisibilityEvent.type);
|
||||
expect(evt).toBeInstanceOf(ChangedPrVisibilityEvent);
|
||||
expect(evt).toEqual(jasmine.any(ChangedPrVisibilityEvent));
|
||||
expect(evt.pr).toBe(+pr);
|
||||
expect(evt.shas).toBe(shas);
|
||||
expect(evt.isPublic).toBe(makePublic);
|
||||
@@ -309,82 +338,94 @@ describe('BuildCreator', () => {
|
||||
emitted = true;
|
||||
});
|
||||
|
||||
await bc.updatePrVisibility(pr, makePublic);
|
||||
expect(emitted).toBe(true);
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(() => expect(emitted).toBe(true)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if the visibility is already up-to-date', async () => {
|
||||
it('should do nothing if the visibility is already up-to-date', done => {
|
||||
bcExistsSpy.and.callFake((dir: string) => dir === newPrDir);
|
||||
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeResolvedTo(false);
|
||||
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(result => {
|
||||
expect(result).toBe(false);
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if the PR directory does not exist', async () => {
|
||||
it('should do nothing if the PR directory does not exist', done => {
|
||||
bcExistsSpy.and.returnValue(false);
|
||||
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeResolvedTo(false);
|
||||
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.updatePrVisibility(pr, makePublic).
|
||||
then(result => {
|
||||
expect(result).toBe(false);
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('on error', () => {
|
||||
|
||||
it('should abort and skip further operations if both directories exist', async () => {
|
||||
it('should abort and skip further operations if both directories exist', done => {
|
||||
bcExistsSpy.and.returnValue(true);
|
||||
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeRejectedWithPreviewServerError(
|
||||
409, `Request to move '${oldPrDir}' to existing directory '${newPrDir}'.`);
|
||||
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||
expectToBePreviewServerError(err, 409,
|
||||
`Request to move '${oldPrDir}' to existing directory '${newPrDir}'.`);
|
||||
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to rename the directory', async () => {
|
||||
it('should abort and skip further operations if it fails to rename the directory', done => {
|
||||
shellMvSpy.and.throwError('');
|
||||
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeRejected();
|
||||
|
||||
expect(shellMvSpy).toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.updatePrVisibility(pr, makePublic).catch(() => {
|
||||
expect(shellMvSpy).toHaveBeenCalled();
|
||||
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to list the SHAs', async () => {
|
||||
it('should abort and skip further operations if it fails to list the SHAs', done => {
|
||||
bcListShasByDate.and.throwError('');
|
||||
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeRejected();
|
||||
|
||||
expect(shellMvSpy).toHaveBeenCalled();
|
||||
expect(bcListShasByDate).toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
bc.updatePrVisibility(pr, makePublic).catch(() => {
|
||||
expect(shellMvSpy).toHaveBeenCalled();
|
||||
expect(bcListShasByDate).toHaveBeenCalled();
|
||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should reject with an PreviewServerError', async () => {
|
||||
it('should reject with an PreviewServerError', done => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
shellMvSpy.and.callFake(() => { throw 'Test'; });
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeRejectedWithPreviewServerError(
|
||||
500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\nTest`);
|
||||
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||
expectToBePreviewServerError(err, 500,
|
||||
`Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\nTest`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should pass PreviewServerError instances unmodified', async () => {
|
||||
it('should pass PreviewServerError instances unmodified', done => {
|
||||
shellMvSpy.and.callFake(() => { throw new PreviewServerError(543, 'Test'); });
|
||||
await expectAsync(bc.updatePrVisibility(pr, makePublic)).toBeRejectedWithPreviewServerError(543, 'Test');
|
||||
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||
expectToBePreviewServerError(err, 543, 'Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -402,14 +443,12 @@ describe('BuildCreator', () => {

beforeEach(() => {
fsAccessCbs = [];
fsAccessSpy = spyOn(fs, 'access').and.callFake(
((_: string, cb: (v?: any) => void) => fsAccessCbs.push(cb)) as unknown as typeof fs.access,
);
fsAccessSpy = spyOn(fs, 'access').and.callFake((_: string, cb: (v?: any) => void) => fsAccessCbs.push(cb));
});

it('should return a promise', () => {
expect((bc as any).exists('foo')).toBeInstanceOf(Promise);
expect((bc as any).exists('foo')).toEqual(jasmine.any(Promise));
});
@@ -419,29 +458,25 @@ describe('BuildCreator', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with \'true\' if \'fs.access()\' succeeds', async () => {
|
||||
const existsPromises = [
|
||||
(bc as any).exists('foo'),
|
||||
(bc as any).exists('bar'),
|
||||
];
|
||||
it('should resolve with \'true\' if \'fs.access()\' succeeds', done => {
|
||||
Promise.
|
||||
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
|
||||
then(results => expect(results).toEqual([true, true])).
|
||||
then(done);
|
||||
|
||||
fsAccessCbs[0]();
|
||||
fsAccessCbs[1](null);
|
||||
|
||||
await expectAsync(Promise.all(existsPromises)).toBeResolvedTo([true, true]);
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with \'false\' if \'fs.access()\' errors', async () => {
|
||||
const existsPromises = [
|
||||
(bc as any).exists('foo'),
|
||||
(bc as any).exists('bar'),
|
||||
];
|
||||
it('should resolve with \'false\' if \'fs.access()\' errors', done => {
|
||||
Promise.
|
||||
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
|
||||
then(results => expect(results).toEqual([false, false])).
|
||||
then(done);
|
||||
|
||||
fsAccessCbs[0]('Error');
|
||||
fsAccessCbs[1](new Error());
|
||||
|
||||
await expectAsync(Promise.all(existsPromises)).toBeResolvedTo([false, false]);
|
||||
});
|
||||
|
||||
});
|
||||
@@ -460,15 +495,12 @@ describe('BuildCreator', () => {
|
||||
consoleWarnSpy = spyOn(Logger.prototype, 'warn');
|
||||
shellChmodSpy = spyOn(shell, 'chmod');
|
||||
shellRmSpy = spyOn(shell, 'rm');
|
||||
cpExecSpy = spyOn(cp, 'exec').and.callFake(
|
||||
((_: string, cb: (...args: any[]) => void) =>
|
||||
cpExecCbs.push(cb)) as unknown as typeof cp.exec,
|
||||
);
|
||||
cpExecSpy = spyOn(cp, 'exec').and.callFake((_: string, cb: (...args: any[]) => void) => cpExecCbs.push(cb));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect((bc as any).extractArchive('foo', 'bar')).toBeInstanceOf(Promise);
|
||||
expect((bc as any).extractArchive('foo', 'bar')).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
@ -480,68 +512,78 @@ describe('BuildCreator', () => {
|
||||
});
|
||||
|
||||
|
||||
it('should log (as a warning) any stderr output if extracting succeeded', async () => {
|
||||
const extractPromise = (bc as any).extractArchive('foo', 'bar');
|
||||
it('should log (as a warning) any stderr output if extracting succeeded', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').
|
||||
then(() => expect(consoleWarnSpy).toHaveBeenCalledWith('This is stderr')).
|
||||
then(done);
|
||||
|
||||
cpExecCbs[0](null, 'This is stdout', 'This is stderr');
|
||||
|
||||
await expectAsync(extractPromise).toBeResolved();
|
||||
expect(consoleWarnSpy).toHaveBeenCalledWith('This is stderr');
|
||||
});
|
||||
|
||||
|
||||
it('should make the build directory non-writable', async () => {
|
||||
const extractPromise = (bc as any).extractArchive('foo', 'bar');
|
||||
cpExecCbs[0]();
|
||||
it('should make the build directory non-writable', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').
|
||||
then(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a-w', 'bar')).
|
||||
then(done);
|
||||
|
||||
await expectAsync(extractPromise).toBeResolved();
|
||||
expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a-w', 'bar');
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
|
||||
it('should delete the build artifact file on success', async () => {
|
||||
const extractPromise = (bc as any).extractArchive('input/file', 'output/dir');
|
||||
cpExecCbs[0]();
|
||||
it('should delete the build artifact file on success', done => {
|
||||
(bc as any).extractArchive('input/file', 'output/dir').
|
||||
then(() => expect(shellRmSpy).toHaveBeenCalledWith('-f', 'input/file')).
|
||||
then(done);
|
||||
|
||||
await expectAsync(extractPromise).toBeResolved();
|
||||
expect(shellRmSpy).toHaveBeenCalledWith('-f', 'input/file');
|
||||
cpExecCbs[0]();
|
||||
});
|
||||
|
||||
|
||||
describe('on error', () => {
|
||||
|
||||
it('should abort and skip further operations if it fails to extract the archive', async () => {
|
||||
const extractPromise = (bc as any).extractArchive('foo', 'bar');
|
||||
it('should abort and skip further operations if it fails to extract the archive', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).not.toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
cpExecCbs[0]('Test');
|
||||
|
||||
await expectAsync(extractPromise).toBeRejectedWith('Test');
|
||||
expect(shellChmodSpy).not.toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should abort and skip further operations if it fails to make non-writable', async () => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
shellChmodSpy.and.callFake(() => { throw 'Test'; });
|
||||
it('should abort and skip further operations if it fails to make non-writable', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
shellChmodSpy.and.callFake(() => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
throw 'Test';
|
||||
});
|
||||
|
||||
const extractPromise = (bc as any).extractArchive('foo', 'bar');
|
||||
cpExecCbs[0]();
|
||||
|
||||
await expectAsync(extractPromise).toBeRejectedWith('Test');
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should abort and reject if it fails to remove the build artifact file', async () => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
shellRmSpy.and.callFake(() => { throw 'Test'; });
|
||||
it('should abort and reject if it fails to remove the build artifact file', done => {
|
||||
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).toHaveBeenCalled();
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
|
||||
shellRmSpy.and.callFake(() => {
|
||||
// tslint:disable-next-line: no-string-throw
|
||||
throw 'Test';
|
||||
});
|
||||
|
||||
const extractPromise = (bc as any).extractArchive('foo', 'bar');
|
||||
cpExecCbs[0]();
|
||||
|
||||
await expectAsync(extractPromise).toBeRejectedWith('Test');
|
||||
expect(shellChmodSpy).toHaveBeenCalled();
|
||||
expect(shellRmSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
});
|
||||
@ -558,54 +600,62 @@ describe('BuildCreator', () => {
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
shellLsSpy = spyOn(shell, 'ls').and.returnValue([] as unknown as shell.ShellArray);
|
||||
shellLsSpy = spyOn(shell, 'ls').and.returnValue([]);
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', async () => {
|
||||
it('should return a promise', done => {
|
||||
const promise = (bc as any).listShasByDate('input/dir');
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `ls()`.
|
||||
|
||||
// Do not complete the test (and release the spies) synchronously to avoid running the actual `ls()`.
|
||||
await promise;
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should `ls()` files with their metadata', async () => {
|
||||
await (bc as any).listShasByDate('input/dir');
|
||||
expect(shellLsSpy).toHaveBeenCalledWith('-l', 'input/dir');
|
||||
it('should `ls()` files with their metadata', done => {
|
||||
(bc as any).listShasByDate('input/dir').
|
||||
then(() => expect(shellLsSpy).toHaveBeenCalledWith('-l', 'input/dir')).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject if listing files fails', async () => {
|
||||
shellLsSpy.and.rejectWith('Test');
|
||||
await expectAsync((bc as any).listShasByDate('input/dir')).toBeRejectedWith('Test');
|
||||
it('should reject if listing files fails', done => {
|
||||
shellLsSpy.and.callFake(() => Promise.reject('Test'));
|
||||
(bc as any).listShasByDate('input/dir').catch((err: string) => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should return the filenames', async () => {
|
||||
shellLsSpy.and.resolveTo([
|
||||
it('should return the filenames', done => {
|
||||
shellLsSpy.and.callFake(() => Promise.resolve([
|
||||
lsResult('foo', 100),
|
||||
lsResult('bar', 200),
|
||||
lsResult('baz', 300),
|
||||
]);
|
||||
]));
|
||||
|
||||
await expectAsync((bc as any).listShasByDate('input/dir')).toBeResolvedTo(['foo', 'bar', 'baz']);
|
||||
(bc as any).listShasByDate('input/dir').
|
||||
then((shas: string[]) => expect(shas).toEqual(['foo', 'bar', 'baz'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should sort by date', async () => {
|
||||
shellLsSpy.and.resolveTo([
|
||||
it('should sort by date', done => {
|
||||
shellLsSpy.and.callFake(() => Promise.resolve([
|
||||
lsResult('foo', 300),
|
||||
lsResult('bar', 100),
|
||||
lsResult('baz', 200),
|
||||
]);
|
||||
]));
|
||||
|
||||
await expectAsync((bc as any).listShasByDate('input/dir')).toBeResolvedTo(['bar', 'baz', 'foo']);
|
||||
(bc as any).listShasByDate('input/dir').
|
||||
then((shas: string[]) => expect(shas).toEqual(['bar', 'baz', 'foo'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not break with ShellJS\' custom `sort()` method', async () => {
|
||||
it('should not break with ShellJS\' custom `sort()` method', done => {
|
||||
const mockArray = [
|
||||
lsResult('foo', 300),
|
||||
lsResult('bar', 100),
|
||||
@ -613,21 +663,26 @@ describe('BuildCreator', () => {
|
||||
];
|
||||
mockArray.sort = jasmine.createSpy('sort');
|
||||
|
||||
shellLsSpy.and.resolveTo(mockArray);
|
||||
|
||||
await expectAsync((bc as any).listShasByDate('input/dir')).toBeResolvedTo(['bar', 'baz', 'foo']);
|
||||
expect(mockArray.sort).not.toHaveBeenCalled();
|
||||
shellLsSpy.and.callFake(() => Promise.resolve(mockArray));
|
||||
(bc as any).listShasByDate('input/dir').
|
||||
then((shas: string[]) => {
|
||||
expect(shas).toEqual(['bar', 'baz', 'foo']);
|
||||
expect(mockArray.sort).not.toHaveBeenCalled();
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should only include directories', async () => {
|
||||
shellLsSpy.and.resolveTo([
|
||||
it('should only include directories', done => {
|
||||
shellLsSpy.and.callFake(() => Promise.resolve([
|
||||
lsResult('foo', 100),
|
||||
lsResult('bar', 200, false),
|
||||
lsResult('baz', 300),
|
||||
]);
|
||||
]));
|
||||
|
||||
await expectAsync((bc as any).listShasByDate('input/dir')).toBeResolvedTo(['foo', 'baz']);
|
||||
(bc as any).listShasByDate('input/dir').
|
||||
then((shas: string[]) => expect(shas).toEqual(['foo', 'baz'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
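
Another recurring change in these spec diffs is swapping `and.callFake(() => Promise.resolve(...))` / `and.callFake(() => Promise.reject(...))` for the `and.resolveTo(...)` / `and.rejectWith(...)` spy shorthands. A small sketch of the equivalence, assuming Jasmine 3.5+ where the shorthands exist; the `api` object is hypothetical:

```ts
// Hypothetical async service used only for illustration.
const api = {load: (id: number) => Promise.resolve('real data')};

it('stubs a resolved promise', async () => {
  // Equivalent to: spyOn(api, 'load').and.callFake(() => Promise.resolve('fake'));
  const loadSpy = spyOn(api, 'load').and.resolveTo('fake');

  await expectAsync(api.load(1)).toBeResolvedTo('fake');
  expect(loadSpy).toHaveBeenCalledWith(1);
});

it('stubs a rejected promise', async () => {
  // Equivalent to: spyOn(api, 'load').and.callFake(() => Promise.reject('boom'));
  spyOn(api, 'load').and.rejectWith('boom');

  await expectAsync(api.load(1)).toBeRejectedWith('boom');
});
```
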
|
||||
|
@ -32,18 +32,18 @@ describe('BuildRetriever', () => {
|
||||
};
|
||||
|
||||
api = new CircleCiApi('ORG', 'REPO', 'TOKEN');
|
||||
spyOn(api, 'getBuildInfo').and.resolveTo(BUILD_INFO);
|
||||
getBuildArtifactUrlSpy = spyOn(api, 'getBuildArtifactUrl').and.resolveTo(BASE_URL + ARTIFACT_PATH);
|
||||
spyOn(api, 'getBuildInfo').and.callFake(() => Promise.resolve(BUILD_INFO));
|
||||
getBuildArtifactUrlSpy = spyOn(api, 'getBuildArtifactUrl')
|
||||
.and.callFake(() => Promise.resolve(BASE_URL + ARTIFACT_PATH));
|
||||
|
||||
WRITEFILE_RESULT = undefined;
|
||||
writeFileSpy = spyOn(fs, 'writeFile').and.callFake(
|
||||
((_path: string, _buffer: Buffer, callback: fs.NoParamCallback) =>
|
||||
callback(WRITEFILE_RESULT)) as typeof fs.writeFile,
|
||||
(_path: string, _buffer: Buffer, callback: (err?: any) => {}) => callback(WRITEFILE_RESULT),
|
||||
);
|
||||
|
||||
EXISTS_RESULT = false;
|
||||
existsSpy = spyOn(fs, 'exists').and.callFake(
|
||||
((_path, callback) => callback(EXISTS_RESULT)) as typeof fs.exists,
|
||||
(_path: string, callback: (exists: boolean) => {}) => callback(EXISTS_RESULT),
|
||||
);
|
||||
});
|
||||
|
||||
@ -56,7 +56,6 @@ describe('BuildRetriever', () => {
|
||||
expect(() => new BuildRetriever(api, -1, DOWNLOAD_DIR))
|
||||
.toThrowError(`Invalid parameter "downloadSizeLimit" should be a number greater than 0.`);
|
||||
});
|
||||
|
||||
it('should fail if the "downloadDir" is missing', () => {
|
||||
expect(() => new BuildRetriever(api, MAX_DOWNLOAD_SIZE, ''))
|
||||
.toThrowError(`Missing or empty required parameter 'downloadDir'!`);
|
||||
@ -73,10 +72,14 @@ describe('BuildRetriever', () => {
|
||||
});
|
||||
|
||||
it('should error if it is not possible to extract the PR number from the branch', async () => {
|
||||
BUILD_INFO.branch = 'master';
|
||||
const retriever = new BuildRetriever(api, MAX_DOWNLOAD_SIZE, DOWNLOAD_DIR);
|
||||
|
||||
await expectAsync(retriever.getGithubInfo(12345)).toBeRejectedWithError('No PR found in branch field: master');
|
||||
try {
|
||||
BUILD_INFO.branch = 'master';
|
||||
await retriever.getGithubInfo(12345);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.message).toEqual('No PR found in branch field: master');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@ -107,10 +110,12 @@ describe('BuildRetriever', () => {
|
||||
it('should fail if the artifact is too large', async () => {
|
||||
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
|
||||
retriever = new BuildRetriever(api, 10, DOWNLOAD_DIR);
|
||||
|
||||
await expectAsync(retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH)).
|
||||
toBeRejectedWith(jasmine.objectContaining({status: 413}));
|
||||
|
||||
try {
|
||||
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.status).toEqual(413);
|
||||
}
|
||||
artifactRequest.done();
|
||||
});
|
||||
|
||||
@ -138,40 +143,50 @@ describe('BuildRetriever', () => {
|
||||
artifactRequest.done();
|
||||
});
|
||||
|
||||
it('should fail if the CircleCI API fails', async () => {
|
||||
getBuildArtifactUrlSpy.and.rejectWith('getBuildArtifactUrl failed');
|
||||
await expectAsync(retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH)).
|
||||
toBeRejectedWithError('CircleCI artifact download failed (getBuildArtifactUrl failed)');
|
||||
it('should fail if the CircleCI API fails', async () => {
|
||||
try {
|
||||
getBuildArtifactUrlSpy.and.callFake(() => Promise.reject('getBuildArtifactUrl failed'));
|
||||
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.message).toEqual('CircleCI artifact download failed (getBuildArtifactUrl failed)');
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail if the URL fetch errors', async () => {
|
||||
it('should fail if the URL fetch errors', async () => {
|
||||
// create a new handler that errors
|
||||
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).replyWithError('Artifact Request Failed');
|
||||
|
||||
await expectAsync(retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH)).toBeRejectedWithError(
|
||||
'CircleCI artifact download failed ' +
|
||||
try {
|
||||
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.message).toEqual('CircleCI artifact download failed ' +
|
||||
'(request to http://test.com/some/path/build.zip failed, reason: Artifact Request Failed)');
|
||||
|
||||
}
|
||||
artifactRequest.done();
|
||||
});
|
||||
|
||||
it('should fail if the URL fetch 404s', async () => {
|
||||
it('should fail if the URL fetch 404s', async () => {
|
||||
// create a new handler that errors
|
||||
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(404, 'No such artifact');
|
||||
|
||||
await expectAsync(retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH)).
|
||||
toBeRejectedWithError('CircleCI artifact download failed (Error 404 - Not Found)');
|
||||
|
||||
try {
|
||||
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.message).toEqual('CircleCI artifact download failed (Error 404 - Not Found)');
|
||||
}
|
||||
artifactRequest.done();
|
||||
});
|
||||
|
||||
it('should fail if file write fails', async () => {
|
||||
it('should fail if file write fails', async () => {
|
||||
const artifactRequest = nock(BASE_URL).get(ARTIFACT_PATH).reply(200, ARTIFACT_CONTENTS);
|
||||
WRITEFILE_RESULT = 'Test Error';
|
||||
|
||||
await expectAsync(retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH)).
|
||||
toBeRejectedWithError('CircleCI artifact download failed (Test Error)');
|
||||
|
||||
try {
|
||||
WRITEFILE_RESULT = 'Test Error';
|
||||
await retriever.downloadBuildArtifact(12345, 777, 'COMMIT', ARTIFACT_PATH);
|
||||
throw new Error('Exception Expected');
|
||||
} catch (error) {
|
||||
expect(error.message).toEqual('CircleCI artifact download failed (Test Error)');
|
||||
}
|
||||
artifactRequest.done();
|
||||
});
|
||||
});
|
||||
|
@ -51,10 +51,7 @@ describe('BuildVerifier', () => {
|
||||
describe('getSignificantFilesChanged', () => {
|
||||
it('should return false if none of the fetched files match the given pattern', async () => {
|
||||
const fetchFilesSpy = spyOn(prs, 'fetchFiles');
|
||||
fetchFilesSpy.and.resolveTo([
|
||||
{filename: 'a/b/c', sha: 'a1'},
|
||||
{filename: 'd/e/f', sha: 'b2'},
|
||||
]);
|
||||
fetchFilesSpy.and.callFake(() => Promise.resolve([{filename: 'a/b/c'}, {filename: 'd/e/f'}]));
|
||||
expect(await bv.getSignificantFilesChanged(777, /^x/)).toEqual(false);
|
||||
expect(fetchFilesSpy).toHaveBeenCalledWith(777);
|
||||
|
||||
@ -81,30 +78,37 @@ describe('BuildVerifier', () => {
|
||||
user: {login: 'username'},
|
||||
};
|
||||
|
||||
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').and.resolveTo(mockPrInfo);
|
||||
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').and.resolveTo(true);
|
||||
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
|
||||
and.callFake(() => Promise.resolve(mockPrInfo));
|
||||
|
||||
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
|
||||
and.callFake(() => Promise.resolve(true));
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', async () => {
|
||||
it('should return a promise', done => {
|
||||
const promise = bv.getPrIsTrusted(pr);
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
|
||||
|
||||
// Do not complete the test (and release the spies) synchronously to avoid running the actual
|
||||
// `GithubTeams#isMemberBySlug()`.
|
||||
await promise;
|
||||
expect(promise).toEqual(jasmine.any(Promise));
|
||||
});
|
||||
|
||||
|
||||
it('should fetch the corresponding PR', async () => {
|
||||
await bv.getPrIsTrusted(pr);
|
||||
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
|
||||
it('should fetch the corresponding PR', done => {
|
||||
bv.getPrIsTrusted(pr).then(() => {
|
||||
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if fetching the PR errors', async () => {
|
||||
prsFetchSpy.and.rejectWith('Test');
|
||||
await expectAsync(bv.getPrIsTrusted(pr)).toBeRejectedWith('Test');
|
||||
it('should fail if fetching the PR errors', done => {
|
||||
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
|
||||
bv.getPrIsTrusted(pr).catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@ -113,14 +117,19 @@ describe('BuildVerifier', () => {
|
||||
beforeEach(() => mockPrInfo.labels.push({name: 'trusted: pr-label'}));
|
||||
|
||||
|
||||
it('should resolve to true', async () => {
|
||||
await expectAsync(bv.getPrIsTrusted(pr)).toBeResolvedTo(true);
|
||||
it('should resolve to true', done => {
|
||||
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||
expect(isTrusted).toBe(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should not try to verify the author\'s membership status', async () => {
|
||||
await expectAsync(bv.getPrIsTrusted(pr));
|
||||
expect(teamsIsMemberBySlugSpy).not.toHaveBeenCalled();
|
||||
it('should not try to verify the author\'s membership status', done => {
|
||||
bv.getPrIsTrusted(pr).then(() => {
|
||||
expect(teamsIsMemberBySlugSpy).not.toHaveBeenCalled();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@ -128,27 +137,40 @@ describe('BuildVerifier', () => {
|
||||
|
||||
describe('when the PR does not have the "trusted PR" label', () => {
|
||||
|
||||
it('should verify the PR author\'s membership in the specified teams', async () => {
|
||||
await bv.getPrIsTrusted(pr);
|
||||
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
|
||||
it('should verify the PR author\'s membership in the specified teams', done => {
|
||||
bv.getPrIsTrusted(pr).then(() => {
|
||||
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should fail if verifying membership errors', async () => {
|
||||
teamsIsMemberBySlugSpy.and.rejectWith('Test');
|
||||
await expectAsync(bv.getPrIsTrusted(pr)).toBeRejectedWith('Test');
|
||||
it('should fail if verifying membership errors', done => {
|
||||
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
|
||||
bv.getPrIsTrusted(pr).catch(err => {
|
||||
expect(err).toBe('Test');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve to true if the PR\'s author is a member', async () => {
|
||||
teamsIsMemberBySlugSpy.and.resolveTo(true);
|
||||
await expectAsync(bv.getPrIsTrusted(pr)).toBeResolvedTo(true);
|
||||
it('should resolve to true if the PR\'s author is a member', done => {
|
||||
teamsIsMemberBySlugSpy.and.callFake(() => Promise.resolve(true));
|
||||
|
||||
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||
expect(isTrusted).toBe(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should resolve to false if the PR\'s author is not a member', async () => {
|
||||
teamsIsMemberBySlugSpy.and.resolveTo(false);
|
||||
await expectAsync(bv.getPrIsTrusted(pr)).toBeResolvedTo(false);
|
||||
it('should resolve to false if the PR\'s author is not a member', done => {
|
||||
teamsIsMemberBySlugSpy.and.callFake(() => Promise.resolve(false));
|
||||
|
||||
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||
expect(isTrusted).toBe(false);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
@ -0,0 +1,11 @@
import {PreviewServerError} from '../../lib/preview-server/preview-error';

export const expectToBePreviewServerError = (actual: PreviewServerError, status?: number, message?: string) => {
expect(actual).toEqual(jasmine.any(PreviewServerError));
if (status != null) {
expect(actual.status).toBe(status);
}
if (message != null) {
expect(actual.message).toBe(message);
}
};
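
A sketch of how this helper is meant to be called from the `done`-callback specs shown earlier; the `failingPreviewOp()` promise is hypothetical, standing in for calls such as `bc.updatePrVisibility(...)`:

```ts
// Illustrative usage of the helper defined above.
it('should reject with a PreviewServerError', done => {
  failingPreviewOp().catch(err => {
    expectToBePreviewServerError(err, 500, 'Error message');
    done();
  });
});
```
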
@ -1,5 +0,0 @@
declare module jasmine {
interface AsyncMatchers {
toBeRejectedWithPreviewServerError(status: number, message?: string | RegExp): Promise<void>;
}
}
@ -1,59 +0,0 @@
|
||||
import {PreviewServerError} from '../../lib/preview-server/preview-error';
|
||||
|
||||
|
||||
// Matchers
|
||||
const toBeRejectedWithPreviewServerError: jasmine.CustomAsyncMatcherFactory = () => {
|
||||
return {
|
||||
async compare(actualPromise: Promise<never>, expectedStatus: number, expectedMessage?: string | RegExp) {
|
||||
if (!(actualPromise instanceof Promise)) {
|
||||
throw new Error(`Expected '${toBeRejectedWithPreviewServerError.name}()' to be called on a promise.`);
|
||||
}
|
||||
|
||||
try {
|
||||
await actualPromise;
|
||||
|
||||
return {
|
||||
pass: false,
|
||||
message: `Expected a promise to be rejected with a '${PreviewServerError.name}', but it was resolved.`,
|
||||
};
|
||||
} catch (actualError) {
|
||||
const actualPrintValue = stringify(actualError);
|
||||
const expectedPrintValue =
|
||||
stringify(new PreviewServerError(expectedStatus, expectedMessage && `${expectedMessage}`));
|
||||
|
||||
const pass = errorMatches(actualError, expectedStatus, expectedMessage);
|
||||
const message =
|
||||
`Expected a promise ${pass ? 'not ' : ''}to be rejected with ${expectedPrintValue}, but is was` +
|
||||
`${pass ? '' : ` rejected with ${actualPrintValue}`}.`;
|
||||
|
||||
return {pass, message};
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Helpers
|
||||
function errorMatches(actualErr: unknown, expectedStatus: number, expectedMsg?: string | RegExp): boolean {
|
||||
if (!(actualErr instanceof PreviewServerError)) return false;
|
||||
if (actualErr.status !== expectedStatus) return false;
|
||||
return messageMatches(actualErr.message, expectedMsg);
|
||||
}
|
||||
|
||||
function messageMatches(actualMsg: string, expectedMsg?: string | RegExp): boolean {
|
||||
if (typeof expectedMsg === 'undefined') return true;
|
||||
if (typeof expectedMsg === 'string') return actualMsg === expectedMsg;
|
||||
return expectedMsg.test(actualMsg);
|
||||
}
|
||||
|
||||
function stringify(value: unknown): string {
|
||||
if (value instanceof PreviewServerError) {
|
||||
return `${PreviewServerError.name}(${value.status}${value.message ? `, ${value.message}` : ''})`;
|
||||
}
|
||||
|
||||
return jasmine.pp(value);
|
||||
}
|
||||
};
|
||||
|
||||
// Exports
|
||||
export const customAsyncMatchers: jasmine.CustomAsyncMatcherFactories = {
|
||||
toBeRejectedWithPreviewServerError,
|
||||
};
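
The `customAsyncMatchers` factory in this hunk has to be registered with Jasmine before a spec can use `toBeRejectedWithPreviewServerError`. A minimal sketch of the registration, assuming Jasmine 3.x (where `jasmine.addAsyncMatchers` is available) and a hypothetical `failingPreviewOp()`:

```ts
// Illustrative registration and use of the custom async matcher defined above.
beforeEach(() => {
  jasmine.addAsyncMatchers(customAsyncMatchers);
});

it('uses the custom matcher', async () => {
  // `failingPreviewOp()` is assumed to reject with `new PreviewServerError(404, 'Not found')`.
  await expectAsync(failingPreviewOp()).toBeRejectedWithPreviewServerError(404, 'Not found');
});
```
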
|
@ -9,8 +9,8 @@ describe('PreviewServerError', () => {
|
||||
|
||||
|
||||
it('should extend Error', () => {
|
||||
expect(err).toBeInstanceOf(PreviewServerError);
|
||||
expect(err).toBeInstanceOf(Error);
|
||||
expect(err).toEqual(jasmine.any(PreviewServerError));
|
||||
expect(err).toEqual(jasmine.any(Error));
|
||||
|
||||
expect(Object.getPrototypeOf(err)).toBe(PreviewServerError.prototype);
|
||||
});
|
||||
|
@ -1,4 +1,5 @@
|
||||
// Imports
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import * as supertest from 'supertest';
|
||||
import {CircleCiApi} from '../../lib/common/circle-ci-api';
|
||||
@ -133,7 +134,7 @@ describe('PreviewServerFactory', () => {
|
||||
const buildCreator = jasmine.any(BuildCreator);
|
||||
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(buildRetriever, buildVerifier, buildCreator, defaultConfig);
|
||||
|
||||
const middleware = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
|
||||
const middleware: express.Express = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
|
||||
expect(httpCreateServerSpy).toHaveBeenCalledWith(middleware);
|
||||
});
|
||||
|
||||
@ -229,7 +230,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
expect(prsAddCommentSpy).toHaveBeenCalledTimes(2);
|
||||
expect(prs).toBe(allCalls[1].object);
|
||||
expect(prs).toBeInstanceOf(GithubPullRequests);
|
||||
expect(prs).toEqual(jasmine.any(GithubPullRequests));
|
||||
expect(prs.repoSlug).toBe('organisation/repo');
|
||||
});
|
||||
|
||||
@ -301,8 +302,9 @@ describe('PreviewServerFactory', () => {
|
||||
let bvGetSignificantFilesChangedSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
bvGetPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted').and.resolveTo(true);
|
||||
bvGetSignificantFilesChangedSpy = spyOn(buildVerifier, 'getSignificantFilesChanged').and.resolveTo(true);
|
||||
bvGetPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted').and.returnValue(Promise.resolve(true));
|
||||
bvGetSignificantFilesChangedSpy = spyOn(buildVerifier, 'getSignificantFilesChanged').
|
||||
and.returnValue(Promise.resolve(true));
|
||||
});
|
||||
|
||||
|
||||
@ -329,7 +331,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond appropriately if the PR did not touch any significant files', async () => {
|
||||
bvGetSignificantFilesChangedSpy.and.resolveTo(false);
|
||||
bvGetSignificantFilesChangedSpy.and.returnValue(Promise.resolve(false));
|
||||
|
||||
const expectedResponse = {canHavePublicPreview: false, reason: 'No significant files touched.'};
|
||||
const expectedLog = `PR:${pr} - Cannot have a public preview, because it did not touch any significant files.`;
|
||||
@ -343,7 +345,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond appropriately if the PR is not automatically verifiable as "trusted"', async () => {
|
||||
bvGetPrIsTrustedSpy.and.resolveTo(false);
|
||||
bvGetPrIsTrustedSpy.and.returnValue(Promise.resolve(false));
|
||||
|
||||
const expectedResponse = {canHavePublicPreview: false, reason: 'Not automatically verifiable as "trusted".'};
|
||||
const expectedLog =
|
||||
@ -370,7 +372,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond with error if `getSignificantFilesChanged()` fails', async () => {
|
||||
bvGetSignificantFilesChangedSpy.and.rejectWith('getSignificantFilesChanged error');
|
||||
bvGetSignificantFilesChangedSpy.and.callFake(() => Promise.reject('getSignificantFilesChanged error'));
|
||||
|
||||
await agent.get(url).expect(500, 'getSignificantFilesChanged error');
|
||||
expect(loggerErrorSpy).toHaveBeenCalledWith('Previewability check error', 'getSignificantFilesChanged error');
|
||||
@ -378,10 +380,11 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond with error if `getPrIsTrusted()` fails', async () => {
|
||||
bvGetPrIsTrustedSpy.and.throwError('getPrIsTrusted error');
|
||||
const error = new Error('getPrIsTrusted error');
|
||||
bvGetPrIsTrustedSpy.and.callFake(() => { throw error; });
|
||||
|
||||
await agent.get(url).expect(500, 'getPrIsTrusted error');
|
||||
expect(loggerErrorSpy).toHaveBeenCalledWith('Previewability check error', new Error('getPrIsTrusted error'));
|
||||
expect(loggerErrorSpy).toHaveBeenCalledWith('Previewability check error', error);
|
||||
});
|
||||
|
||||
});
|
||||
@ -494,7 +497,7 @@ describe('PreviewServerFactory', () => {
|
||||
// Note it is important to put the `reject` into `and.callFake`;
|
||||
// If you just `and.returnValue` the rejected promise
|
||||
// then you get an "unhandled rejection" message in the console.
|
||||
getGithubInfoSpy.and.rejectWith('Test Error');
|
||||
getGithubInfoSpy.and.callFake(() => Promise.reject('Test Error'));
|
||||
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
|
||||
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
|
||||
expect(downloadBuildArtifactSpy).not.toHaveBeenCalled();
|
||||
@ -515,7 +518,7 @@ describe('PreviewServerFactory', () => {
|
||||
});
|
||||
|
||||
it('should fail if the artifact fetch request fails', async () => {
|
||||
downloadBuildArtifactSpy.and.rejectWith('Test Error');
|
||||
downloadBuildArtifactSpy.and.callFake(() => Promise.reject('Test Error'));
|
||||
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
|
||||
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
|
||||
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
|
||||
@ -524,7 +527,7 @@ describe('PreviewServerFactory', () => {
|
||||
});
|
||||
|
||||
it('should fail if verifying the PR fails', async () => {
|
||||
getPrIsTrustedSpy.and.rejectWith('Test Error');
|
||||
getPrIsTrustedSpy.and.callFake(() => Promise.reject('Test Error'));
|
||||
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
|
||||
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
|
||||
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
|
||||
@ -533,7 +536,7 @@ describe('PreviewServerFactory', () => {
|
||||
});
|
||||
|
||||
it('should fail if creating the preview build fails', async () => {
|
||||
createBuildSpy.and.rejectWith('Test Error');
|
||||
createBuildSpy.and.callFake(() => Promise.reject('Test Error'));
|
||||
await agent.post(URL).send(BASIC_PAYLOAD).expect(500, 'Test Error');
|
||||
expect(getGithubInfoSpy).toHaveBeenCalledWith(BUILD_NUM);
|
||||
expect(downloadBuildArtifactSpy).toHaveBeenCalled();
|
||||
@ -602,7 +605,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should propagate errors from BuildVerifier', async () => {
|
||||
bvGetPrIsTrustedSpy.and.rejectWith('Test');
|
||||
bvGetPrIsTrustedSpy.and.callFake(() => Promise.reject('Test'));
|
||||
|
||||
await createRequest(+pr).expect(500, 'Test');
|
||||
|
||||
@ -612,9 +615,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should call \'BuildCreator#updatePrVisibility()\' with the correct arguments', async () => {
|
||||
bvGetPrIsTrustedSpy.
|
||||
withArgs(24).and.resolveTo(false).
|
||||
withArgs(42).and.resolveTo(true);
|
||||
bvGetPrIsTrustedSpy.and.callFake((pr2: number) => Promise.resolve(pr2 === 42));
|
||||
|
||||
await createRequest(24);
|
||||
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(24, false);
|
||||
@ -625,7 +626,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should propagate errors from BuildCreator', async () => {
|
||||
bcUpdatePrVisibilitySpy.and.rejectWith('Test');
|
||||
bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject('Test'));
|
||||
await createRequest(+pr).expect(500, 'Test');
|
||||
});
|
||||
|
||||
@ -633,9 +634,7 @@ describe('PreviewServerFactory', () => {
|
||||
describe('on success', () => {
|
||||
|
||||
it('should respond with 200 (action: undefined)', async () => {
|
||||
bvGetPrIsTrustedSpy.
|
||||
withArgs(2).and.resolveTo(false).
|
||||
withArgs(4).and.resolveTo(true);
|
||||
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
||||
|
||||
const reqs = [4, 2].map(num => createRequest(num).expect(200, http.STATUS_CODES[200]));
|
||||
await Promise.all(reqs);
|
||||
@ -643,9 +642,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond with 200 (action: labeled)', async () => {
|
||||
bvGetPrIsTrustedSpy.
|
||||
withArgs(2).and.resolveTo(false).
|
||||
withArgs(4).and.resolveTo(true);
|
||||
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
||||
|
||||
const reqs = [4, 2].map(num => createRequest(num, 'labeled').expect(200, http.STATUS_CODES[200]));
|
||||
await Promise.all(reqs);
|
||||
@ -653,9 +650,7 @@ describe('PreviewServerFactory', () => {
|
||||
|
||||
|
||||
it('should respond with 200 (action: unlabeled)', async () => {
|
||||
bvGetPrIsTrustedSpy.
|
||||
withArgs(2).and.resolveTo(false).
|
||||
withArgs(4).and.resolveTo(true);
|
||||
bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
||||
|
||||
const reqs = [4, 2].map(num => createRequest(num, 'unlabeled').expect(200, http.STATUS_CODES[200]));
|
||||
await Promise.all(reqs);
|
||||
|
@ -39,7 +39,7 @@ describe('preview-server/utils', () => {
|
||||
throwRequestError(505, 'ERROR MESSAGE', request);
|
||||
} catch (error) {
|
||||
caught = true;
|
||||
expect(error).toBeInstanceOf(PreviewServerError);
|
||||
expect(error).toEqual(jasmine.any(PreviewServerError));
|
||||
expect(error.status).toEqual(505);
|
||||
expect(error.message).toEqual(`ERROR MESSAGE in request: POST some.domain.com/path "The request body"`);
|
||||
}
|
||||
|
File diff suppressed because it is too large
@ -8,32 +8,10 @@ exitCode=0
|
||||
|
||||
|
||||
# Helpers
|
||||
function checkCert {
|
||||
local certPath=$1
|
||||
|
||||
if [[ ! -f "$certPath" ]]; then
|
||||
echo "Certificate '$certPath' does not exist. Skipping expiration check..."
|
||||
return
|
||||
fi
|
||||
|
||||
openssl x509 -checkend 0 -in "$certPath" -noout > /dev/null
|
||||
reportStatus "Certificate '$certPath'"
|
||||
|
||||
if [[ $? -ne 0 ]]; then
|
||||
echo " [WARN]"
|
||||
echo " If you did not provide the certificate explicitly, try running the"
|
||||
echo " 'docker build' command again with the '--no-cache' option to generate"
|
||||
echo " a new self-signed certificate."
|
||||
fi
|
||||
}
|
||||
|
||||
function reportStatus {
|
||||
local lastExitCode=$?
|
||||
|
||||
echo "$1: $([[ $lastExitCode -eq 0 ]] && echo OK || echo NOT OK)"
|
||||
[[ $lastExitCode -eq 0 ]] || exitCode=1
|
||||
|
||||
return $lastExitCode
|
||||
}
|
||||
|
||||
|
||||
@ -50,16 +28,6 @@ for s in ${services[@]}; do
|
||||
done
|
||||
|
||||
|
||||
# Check SSL/TLS certificates expiration
|
||||
certs=(
|
||||
"$AIO_LOCALCERTS_DIR/$AIO_DOMAIN_NAME.crt"
|
||||
"$TEST_AIO_LOCALCERTS_DIR/$TEST_AIO_DOMAIN_NAME.crt"
|
||||
)
|
||||
for c in ${certs[@]}; do
|
||||
checkCert $c
|
||||
done
|
||||
|
||||
|
||||
# Check servers
|
||||
origins=(
|
||||
http://$AIO_PREVIEW_SERVER_HOSTNAME:$AIO_PREVIEW_SERVER_PORT
|
||||
|
@ -3,7 +3,7 @@

## Create `aio-builds` persistent disk (if not already exists)
- Follow instructions [here](https://cloud.google.com/compute/docs/disks/add-persistent-disk#create_disk).
- `sudo mkfs.ext4 -m 0 -E lazy_itable_init=0,lazy_journal_init=0,discard /dev/disk/by-id/google-aio-builds`
- `sudo mkfs.ext4 -F -E lazy_itable_init=0,lazy_journal_init=0,discard /dev/disk/by-id/google-aio-builds`


## Mount disk
@ -14,7 +14,7 @@

## Mount disk on boot
- Run:
```sh
echo UUID=`sudo blkid -s UUID -o value /dev/disk/by-id/google-aio-builds` \
/mnt/disks/aio-builds ext4 defaults,discard,nofail 0 2 | sudo tee -a /etc/fstab
```
echo UUID=`sudo blkid -s UUID -o value /dev/disk/by-id/google-aio-builds` \
/mnt/disks/aio-builds ext4 discard,defaults,nofail 0 2 | sudo tee -a /etc/fstab
```
@ -1,11 +1,10 @@
# VM setup - Create docker image


## Install git, Node.js and yarn
- `sudo apt-get update`
- `sudo apt-get install -y git`
- Install the latest stable version of [Node.js](https://nodejs.org/en/download).
- Install the latest stable version of [yarn](https://classic.yarnpkg.com/en/docs/install).
## Install node and yarn
- Install [nvm](https://github.com/creationix/nvm#installation).
- Install node.js: `nvm install 8`
- Install yarn: `npm -g install yarn`


## Checkout repository
@ -17,11 +16,7 @@
- You can overwrite the default environment variables inside the image, by passing new values using
`--build-arg`.

**Note 1:** The script has to execute docker commands with `sudo`.

**Note 2:**
The script has to execute `yarn` commands, so make sure `yarn` is on the `PATH` when invoking the
script.
**Note:** The script has to execute docker commands with `sudo`.


## Example
@ -31,7 +26,7 @@ The following commands would create a docker image from GitHub repo `foo/bar` to

- `git clone https://github.com/foo/bar.git foobar`
- Run:
```sh
```
./foobar/aio-builds-setup/scripts/create-image.sh foobar-builds \
--build-arg AIO_REPO_SLUG=foo/bar \
--build-arg AIO_DOMAIN_NAME=foobar-builds.io \
@ -3,17 +3,24 @@

## Install docker

Official installation instructions: https://docs.docker.com/engine/install
Example:
_Debian (jessie):_
- `sudo apt-get update`
- `sudo apt-get install -y apt-transport-https ca-certificates curl git software-properties-common`
- `curl -fsSL https://apt.dockerproject.org/gpg | sudo apt-key add -`
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ debian-$(lsb_release -cs) main"`
- `sudo apt-get update`
- `sudo apt-get -y install docker-engine`

_Debian (buster):_
_Ubuntu (16.04):_
- `sudo apt-get update`
- `sudo apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common`
- `curl -fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add -`
- `sudo apt-key fingerprint 0EBFCD88`
- `sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"`
- `sudo apt-get install -y curl git linux-image-extra-$(uname -r) linux-image-extra-virtual`
- `sudo apt-get install -y apt-transport-https ca-certificates`
- `curl -fsSL https://yum.dockerproject.org/gpg | sudo apt-key add -`
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ ubuntu-$(lsb_release -cs) main"`
- `sudo apt-get update`
- `sudo apt-get -y install docker-ce docker-ce-cli containerd.io`
- `sudo apt-get -y install docker-engine`


## Start the docker
@ -8,16 +8,16 @@ VM host to update the preview server based on changes in the source code.

The script will pull the latest changes from the origin's master branch and examine if there have
been any changes in files inside the preview server source code directory (see below). If there are,
it will create a new image and verify that it works as expected. Finally, it will stop and remove
it will create a new image and verify that is works as expected. Finally, it will stop and remove
the old docker container and image, create a new container based on the new image and start it.

The script assumes that the preview server source code is in the repository's
`aio/aio-builds-setup/` directory and expects the following inputs:

- **$1**: `HOST_REPO_DIR`
- **$2**: `HOST_SECRETS_DIR`
- **$3**: `HOST_BUILDS_DIR`
- **$4**: `HOST_LOCALCERTS_DIR`
- **$2**: `HOST_LOCALCERTS_DIR`
- **$3**: `HOST_SECRETS_DIR`
- **$4**: `HOST_BUILDS_DIR`
- **$5**: `HOST_LOGS_DIR`

See [here](vm-setup--create-host-dirs-and-files.md) for more info on what each input directory is
@ -25,38 +25,28 @@ used for.

**Note 1:** The script has to execute docker commands with `sudo`.

**Note 2:**
The script has to execute `yarn` commands, so make sure `yarn` is on the `PATH` when invoking the
script.

**Note 3:** Make sure the user that executes the script has access to update the repository.
**Note 2:** Make sure the user that executes the script has access to update the repository


## Run the script manually
You may choose to manually run the script, when necessary. Example:

```sh
```
update-preview-server.sh \
/path/to/repo \
/path/to/localcerts \
/path/to/secrets \
/path/to/builds \
/path/to/localcerts \
/path/to/logs
```


## Run the script automatically
You may choose to automatically trigger the script, e.g. using a cronjob. For example, the following
cronjob entry would run the script every 30 minutes, update the preview server (if necessary) and
log its output to `update-preview-server.log` (assuming the user has the necessary permissions):
cronjob entry would run the script every hour and update the preview server (assuming the user has
the necessary permissions):

```
# Periodically check for changes and update the preview server (if necessary)
*/30 * * * * /path/to/update-preview-server.sh /path/to/repo /path/to/secrets /path/to/builds /path/to/localcerts /path/to/logs >> /path/to/update-preview-server.log 2>&1
*/30 * * * * /path/to/update-preview-server.sh /path/to/repo /path/to/localcerts /path/to/secrets /path/to/builds /path/to/logs
```

**Note:**
Keep in mind that cron jobs run in non-interactive, non-login shells. This means that the execution
context might be different compared to when running the same commands from an interactive, login
shell. For example, `.bashrc` files are normally _not_ sourced automatically in cron jobs. See
[here](http://www.gnu.org/software/bash/manual/html_node/Bash-Startup-Files.html) for more info.
|
@ -7,14 +7,13 @@ echo -e "\n\n[`date`] - Updating the preview server..."
|
||||
|
||||
# Input
|
||||
readonly HOST_REPO_DIR=$1
|
||||
readonly HOST_SECRETS_DIR=$2
|
||||
readonly HOST_BUILDS_DIR=$3
|
||||
readonly HOST_LOCALCERTS_DIR=$4
|
||||
readonly HOST_LOCALCERTS_DIR=$2
|
||||
readonly HOST_SECRETS_DIR=$3
|
||||
readonly HOST_BUILDS_DIR=$4
|
||||
readonly HOST_LOGS_DIR=$5
|
||||
|
||||
# Constants
|
||||
readonly PROVISIONAL_TAG=provisional
|
||||
readonly PROVISIONAL_IMAGE_NAME=aio-builds:$PROVISIONAL_TAG
|
||||
readonly PROVISIONAL_IMAGE_NAME=aio-builds:provisional
|
||||
readonly LATEST_IMAGE_NAME=aio-builds:latest
|
||||
readonly CONTAINER_NAME=aio
|
||||
|
||||
@ -31,7 +30,7 @@ readonly CONTAINER_NAME=aio
|
||||
# Do not update the server unless files inside `aio-builds-setup/` have changed
|
||||
# or the last attempt failed (identified by the provisional image still being around).
|
||||
readonly relevantChangedFilesCount=$(git diff --name-only $lastDeployedCommit...HEAD | grep -P "^aio/aio-builds-setup/" | wc -l)
|
||||
readonly lastAttemptFailed=$(sudo docker image ls | grep "$PROVISIONAL_TAG" >> /dev/fd/3 && echo "true" || echo "false")
|
||||
readonly lastAttemptFailed=$(sudo docker rmi "$PROVISIONAL_IMAGE_NAME" >> /dev/fd/3 && echo "true" || echo "false")
|
||||
if [[ $relevantChangedFilesCount -eq 0 ]] && [[ "$lastAttemptFailed" != "true" ]]; then
|
||||
echo "Skipping update because no relevant files have been touched."
|
||||
exit 0
|
||||
@ -61,9 +60,9 @@ readonly CONTAINER_NAME=aio
|
||||
--publish 80:80 \
|
||||
--publish 443:443 \
|
||||
--restart unless-stopped \
|
||||
--volume $HOST_LOCALCERTS_DIR:/etc/ssl/localcerts:ro \
|
||||
--volume $HOST_SECRETS_DIR:/aio-secrets:ro \
|
||||
--volume $HOST_BUILDS_DIR:/var/www/aio-builds \
|
||||
--volume $HOST_LOCALCERTS_DIR:/etc/ssl/localcerts:ro \
|
||||
--volume $HOST_LOGS_DIR:/var/log/aio \
|
||||
"$LATEST_IMAGE_NAME"
|
||||
|
||||
|
@ -109,3 +109,9 @@ Options that specify files can be given as absolute paths, or as paths relative
|
||||
The [ng generate](cli/generate) and [ng add](cli/add) commands take as an argument the artifact or library to be generated or added to the current project.
|
||||
In addition to any general options, each artifact or library defines its own options in a *schematic*.
|
||||
Schematic options are supplied to the command in the same format as immediate command options.
|
||||
|
||||
|
||||
### Building with Bazel
|
||||
|
||||
Optionally, you can configure the Angular CLI to use [Bazel](https://docs.bazel.build) as the build tool. For more information, see [Building with Bazel](guide/bazel).
|
||||
|
||||
|
5
aio/content/examples/.gitignore
vendored
5
aio/content/examples/.gitignore
vendored
@ -82,6 +82,9 @@ upgrade-phonecat-2-hybrid/aot/**/*
|
||||
# styleguide
|
||||
!styleguide/src/systemjs.custom.js
|
||||
|
||||
# universal
|
||||
!universal/webpack.server.config.js
|
||||
|
||||
# stackblitz
|
||||
*stackblitz.no-link.html
|
||||
|
||||
@ -94,4 +97,4 @@ upgrade-phonecat-3-final/rollup-config.js
|
||||
!upgrade-phonecat-*/**/karma-test-shim.js
|
||||
|
||||
# schematics
|
||||
!schematics-for-libraries/projects/my-lib/package.json
|
||||
!schematics-for-libraries/projects/my-lib/package.json
|
@ -0,0 +1,27 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
}).compileComponents();
|
||||
}));
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
it(`should have as title 'app'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app.title).toEqual('app');
|
||||
}));
|
||||
it('should render title', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
|
||||
}));
|
||||
});
|
@ -0,0 +1,27 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
}).compileComponents();
|
||||
}));
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
it(`should have as title 'app'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app.title).toEqual('app');
|
||||
}));
|
||||
it('should render title', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
|
||||
}));
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
});
|
||||
TestBed.compileComponents();
|
||||
});
|
||||
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
|
||||
it(`should have as title 'app works!'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app.title).toEqual('app works!');
|
||||
}));
|
||||
|
||||
it('should render title', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('h1').textContent).toContain('app works!');
|
||||
}));
|
||||
});
|
@ -0,0 +1,8 @@
|
||||
import { ItemDirective } from './item.directive';
|
||||
|
||||
describe('ItemDirective', () => {
|
||||
it('should create an instance', () => {
|
||||
const directive = new ItemDirective();
|
||||
expect(directive).toBeTruthy();
|
||||
});
|
||||
});
|
@ -0,0 +1,25 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { ItemDetailComponent } from './item-detail.component';
|
||||
|
||||
describe('ItemDetailComponent', () => {
|
||||
let component: ItemDetailComponent;
|
||||
let fixture: ComponentFixture<ItemDetailComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ ItemDetailComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(ItemDetailComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
@ -0,0 +1,16 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
}).compileComponents();
|
||||
}));
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
});
|
@ -1,5 +1,5 @@
|
||||
{
|
||||
"tests": [
|
||||
"e2e": [
|
||||
{
|
||||
"cmd": "yarn",
|
||||
"args": [
|
||||
|
@ -0,0 +1,13 @@
|
||||
{
|
||||
"e2e": [
|
||||
{
|
||||
"cmd": "yarn",
|
||||
"args": [
|
||||
"e2e",
|
||||
"--protractor-config=e2e/protractor-puppeteer.conf.js",
|
||||
"--no-webdriver-update",
|
||||
"--port={PORT}"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -0,0 +1,27 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
}).compileComponents();
|
||||
}));
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
it(`should have as title 'Featured product:'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app.title).toEqual('Featured product:');
|
||||
}));
|
||||
it('should render title in a p tag', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('p').textContent).toContain('Featured product:');
|
||||
}));
|
||||
});
|
@ -0,0 +1,25 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { ItemDetailComponent } from './item-detail.component';
|
||||
|
||||
describe('ItemDetailComponent', () => {
|
||||
let component: ItemDetailComponent;
|
||||
let fixture: ComponentFixture<ItemDetailComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ ItemDetailComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(ItemDetailComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
});
|
||||
TestBed.compileComponents();
|
||||
});
|
||||
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
|
||||
it(`should have as title 'app works!'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.componentInstance;
|
||||
expect(app.title).toEqual('app works!');
|
||||
}));
|
||||
|
||||
it('should render title', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('h1').textContent).toContain('app works!');
|
||||
}));
|
||||
});
|
@ -0,0 +1,25 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { CustomerDashboardComponent } from './customer-dashboard.component';
|
||||
|
||||
describe('CustomerDashboardComponent', () => {
|
||||
let component: CustomerDashboardComponent;
|
||||
let fixture: ComponentFixture<CustomerDashboardComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ CustomerDashboardComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(CustomerDashboardComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,6 +0,0 @@
|
||||
{
|
||||
"tests": [
|
||||
{"cmd": "yarn", "args": ["test", "--browsers=ChromeHeadless", "--no-watch"]},
|
||||
{"cmd": "yarn", "args": ["e2e", "--prod", "--protractor-config=e2e/protractor-puppeteer.conf.js", "--no-webdriver-update", "--port={PORT}"]}
|
||||
]
|
||||
}
|
||||
|
@ -0,0 +1,13 @@
|
||||
import { ReactiveModule } from './reactive.module';
|
||||
|
||||
describe('ReactiveModule', () => {
|
||||
let reactiveModule: ReactiveModule;
|
||||
|
||||
beforeEach(() => {
|
||||
reactiveModule = new ReactiveModule();
|
||||
});
|
||||
|
||||
it('should create an instance', () => {
|
||||
expect(reactiveModule).toBeTruthy();
|
||||
});
|
||||
});
|
@ -0,0 +1,13 @@
|
||||
import { TemplateModule } from './template.module';
|
||||
|
||||
describe('TemplateModule', () => {
|
||||
let templateModule: TemplateModule;
|
||||
|
||||
beforeEach(() => {
|
||||
templateModule = new TemplateModule();
|
||||
});
|
||||
|
||||
it('should create an instance', () => {
|
||||
expect(templateModule).toBeTruthy();
|
||||
});
|
||||
});
|
@ -3,7 +3,7 @@ import { NgModule } from '@angular/core';
|
||||
import { BrowserModule } from '@angular/platform-browser';
|
||||
import { FormsModule } from '@angular/forms';
|
||||
|
||||
import { AppComponent } from './app.component';
|
||||
import { AppComponent } from './app.component';
|
||||
import { HeroFormComponent } from './hero-form/hero-form.component';
|
||||
|
||||
@NgModule({
|
||||
|
@ -200,4 +200,13 @@
  (ngModelChange)="model.name = $event">
  TODO: remove this: {{model.name}}
  <!-- #enddocregion ngModel-3-->
  <hr>
  <!-- #docregion ngModelName-2 -->
  <input type="text" class="form-control" id="name"
         required
         [(ngModel)]="model.name" name="name"
         #spy>
  <br>TODO: remove this: {{spy.className}}
  <!-- #enddocregion ngModelName-2 -->

</div>
@ -2,7 +2,7 @@
// #docregion , v1, final
import { Component } from '@angular/core';

import { Hero } from '../hero';
import { Hero } from '../hero';

@Component({
  selector: 'app-hero-form',
@ -1,7 +1,3 @@
{
  "projectType": "testing",
  "tests": [
    {"cmd": "yarn", "args": ["test", "--browsers=ChromeHeadless", "--no-watch"]},
    {"cmd": "yarn", "args": ["e2e", "--prod", "--protractor-config=e2e/protractor-puppeteer.conf.js", "--no-webdriver-update", "--port={PORT}"]}
  ]
  "projectType": "testing"
}
@ -13,13 +13,13 @@ import { searchUrl } from '../package-search/package-search.service';


/**
 * If request is cacheable (e.g., package search) and
 * If request is cachable (e.g., package search) and
 * response is in cache return the cached response as observable.
 * If has 'x-refresh' header that is true,
 * then also re-run the package search, using response from next(),
 * returning an observable that emits the cached response first.
 *
 * If not in cache or not cacheable,
 * If not in cache or not cachable,
 * pass request through to next()
 */
// #docregion v1
@ -28,8 +28,8 @@ export class CachingInterceptor implements HttpInterceptor {
  constructor(private cache: RequestCache) {}

  intercept(req: HttpRequest<any>, next: HttpHandler) {
    // continue if not cacheable.
    if (!isCacheable(req)) { return next.handle(req); }
    // continue if not cachable.
    if (!isCachable(req)) { return next.handle(req); }

    const cachedResponse = this.cache.get(req);
    // #enddocregion v1
@ -51,11 +51,11 @@ export class CachingInterceptor implements HttpInterceptor {
// #enddocregion v1


/** Is this request cacheable? */
function isCacheable(req: HttpRequest<any>) {
  // Only GET requests are cacheable
/** Is this request cachable? */
function isCachable(req: HttpRequest<any>) {
  // Only GET requests are cachable
  return req.method === 'GET' &&
    // Only npm package search is cacheable in this app
    // Only npm package search is cachable in this app
    -1 < req.url.indexOf(searchUrl);
}

@ -1,6 +1,6 @@
{
  "projectType": "i18n",
  "tests": [
  "e2e": [
    {
      "cmd": "yarn",
      "args": [
@ -1,9 +1,11 @@
{
  "files": [
  "files":[
    "!dist/",
    "!**/*.d.ts",
    "!src/**/*.js",
    "!doc-files/**/*",
    "**/*.xlf"
  ]
  ],
  "removeSystemJsConfig": true,
  "type": "i18n"
}
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { AliasingComponent } from './aliasing.component';

describe('AliasingComponent', () => {
  let component: AliasingComponent;
  let fixture: ComponentFixture<AliasingComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ AliasingComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(AliasingComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,27 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    }).compileComponents();
  }));
  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));
  it(`should have as title 'app'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('app');
  }));
  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
  }));
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { InTheMetadataComponent } from './in-the-metadata.component';

describe('InTheMetadataComponent', () => {
  let component: InTheMetadataComponent;
  let fixture: ComponentFixture<InTheMetadataComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ InTheMetadataComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(InTheMetadataComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { InputOutputComponent } from './input-output.component';

describe('InputOutputComponent', () => {
  let component: InputOutputComponent;
  let fixture: ComponentFixture<InputOutputComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ InputOutputComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(InputOutputComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ItemDetailComponent } from './item-detail.component';

describe('ItemDetailComponent', () => {
  let component: ItemDetailComponent;
  let fixture: ComponentFixture<ItemDetailComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ItemDetailComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ItemDetailComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ItemOutputComponent } from './item-output.component';

describe('ItemOutputComponent', () => {
  let component: ItemOutputComponent;
  let fixture: ComponentFixture<ItemOutputComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ItemOutputComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ItemOutputComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,16 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    }).compileComponents();
  }));
  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));
});
@ -0,0 +1,36 @@
import { TestBed, async } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';

describe('AppComponent', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      imports: [
        RouterTestingModule
      ],
      declarations: [
        AppComponent
      ],
    });
    TestBed.compileComponents();
  });

  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));

  it(`should have as title 'customer-app'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('customer-app');
  }));

  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('customer-app');
  }));
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { CustomersComponent } from './customers.component';

describe('CustomerListComponent', () => {
  let component: CustomersComponent;
  let fixture: ComponentFixture<CustomersComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ CustomersComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(CustomersComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { OrdersComponent } from './orders.component';

describe('OrderListComponent', () => {
  let component: OrdersComponent;
  let fixture: ComponentFixture<OrdersComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ OrdersComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(OrdersComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -3,7 +3,6 @@ import {
  AfterContentInit,
  AfterViewChecked,
  AfterViewInit,
  Directive,
  DoCheck,
  OnChanges,
  OnDestroy,
@ -16,8 +15,7 @@ import { LoggerService } from './logger.service';
let nextId = 1;

// #docregion ngOnInit
@Directive()
export class PeekABooDirective implements OnInit {
export class PeekABoo implements OnInit {
  constructor(private logger: LoggerService) { }

  // implement OnInit's `ngOnInit` method
@ -36,7 +34,7 @@ export class PeekABooDirective implements OnInit {
})
// Don't HAVE to mention the Lifecycle Hook interfaces
// unless we want typing and tool support.
export class PeekABooComponent extends PeekABooDirective implements
export class PeekABooComponent extends PeekABoo implements
  OnChanges, OnInit, DoCheck,
  AfterContentInit, AfterContentChecked,
  AfterViewInit, AfterViewChecked,
32
aio/content/examples/ngmodules/src/app/app.component.spec.ts
Normal file
@ -0,0 +1,32 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';

describe('AppComponent', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    });
    TestBed.compileComponents();
  });

  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));

  it(`should have as title 'app works!'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('app works!');
  }));

  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('app works!');
  }));
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ContactComponent } from './contact.component';

describe('ContactComponent', () => {
  let component: ContactComponent;
  let fixture: ComponentFixture<ContactComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ContactComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ContactComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -23,7 +23,7 @@ export class GreetingModule {
  // #enddocregion ctor

  // #docregion for-root
  static forRoot(config: UserServiceConfig): ModuleWithProviders<GreetingModule> {
  static forRoot(config: UserServiceConfig): ModuleWithProviders {
    return {
      ngModule: GreetingModule,
      providers: [
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ItemsComponent } from './items.component';

describe('ItemsComponent', () => {
  let component: ItemsComponent;
  let fixture: ComponentFixture<ItemsComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ItemsComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ItemsComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -1,5 +1,5 @@
{
  "tests": [
  "e2e": [
    {
      "cmd": "yarn",
      "args": [ "tsc", "--project", "./tsconfig.app.json" ]
@ -1,5 +1,5 @@
{
  "tests": [
  "e2e": [
    {
      "cmd": "yarn",
      "args": [ "tsc", "--project", "./tsconfig.app.json" ]
@ -1,5 +1,5 @@
{
  "tests": [
  "e2e": [
    {
      "cmd": "yarn",
      "args": [ "tsc", "--project", "./tsconfig.app.json" ]
@ -0,0 +1,27 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    }).compileComponents();
  }));
  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));
  it(`should have as title 'app'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('app');
  }));
  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
  }));
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ItemDetailComponent } from './item-detail.component';

describe('ItemDetailComponent', () => {
  let component: ItemDetailComponent;
  let fixture: ComponentFixture<ItemDetailComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ItemDetailComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ItemDetailComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ItemListComponent } from './item-list.component';

describe('ItemListComponent', () => {
  let component: ItemListComponent;
  let fixture: ComponentFixture<ItemListComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ItemListComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ItemListComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { StringInitComponent } from './string-init.component';

describe('StringInitComponent', () => {
  let component: StringInitComponent;
  let fixture: ComponentFixture<StringInitComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ StringInitComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(StringInitComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
32
aio/content/examples/providers/src/app/app.component.spec.ts
Normal file
@ -0,0 +1,32 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';

describe('AppComponent', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    });
    TestBed.compileComponents();
  });

  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));

  it(`should have as title 'app works!'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('app works!');
  }));

  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('app works!');
  }));
});
12
aio/content/examples/providers/src/app/user.service.spec.ts
Normal file
@ -0,0 +1,12 @@
import { TestBed, inject } from '@angular/core/testing';
import { UserService } from './user.service';

describe('UserService', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({});
  });

  it('should ...', inject([UserService], (service: UserService) => {
    expect(service).toBeTruthy();
  }));
});
@ -0,0 +1,27 @@
import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [
        AppComponent
      ],
    }).compileComponents();
  }));
  it('should create the app', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app).toBeTruthy();
  }));
  it(`should have as title 'app'`, async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.componentInstance;
    expect(app.title).toEqual('app');
  }));
  it('should render title', async(() => {
    const fixture = TestBed.createComponent(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('Welcome to reactive-forms!');
  }));
});
@ -1,42 +0,0 @@
import { browser, element, by } from 'protractor';

describe('Router basic tutorial e2e tests', () => {

  beforeEach(() => {
    browser.get('');
  });

  it('should display Angular Router Sample', () => {
    expect(element(by.css('h1')).getText()).toBe('Angular Router Sample');
  });

  it('should display Crisis Center button', () => {
    expect(element.all(by.css('a')).get(0).getText()).toBe('Crisis Center');
  });

  it('should display Heroes button', () => {
    expect(element.all(by.css('a')).get(1).getText()).toBe('Heroes');
  });

  it('should display HEROES', () => {
    expect(element(by.css('h3')).getText()).toBe('HEROES');
  });

  it('should change to display crisis list component', async () => {
    const crisisButton = element.all(by.css('a')).get(0);
    await crisisButton.click();
    expect(element(by.css('h3')).getText()).toBe('CRISIS CENTER');
  });

  it('should change to display heroes component', async () => {
    const heroesButton = element.all(by.css('a')).get(1);
    await heroesButton.click();
    expect(element(by.css('h3')).getText()).toBe('HEROES');
  });

  it('should use wildcard route', async () => {
    browser.get('/fake-page');
    expect(browser.getCurrentUrl()).toContain('fake-page');
    expect(element(by.css('h2')).getText()).toBe('Page Not Found');
  });
});
@ -1,34 +0,0 @@
.button {
  box-shadow: inset 0px 1px 0px 0px #ffffff;
  background: linear-gradient(to bottom, #ffffff 5%, #f6f6f6 100%);
  background-color: #ffffff;
  border-radius: 6px;
  border: 1px solid #dcdcdc;
  display: inline-block;
  cursor: pointer;
  color: #666666;
  font-family: Arial;
  font-size: 15px;
  font-weight: bold;
  padding: 6px 24px;
  text-decoration: none;
  text-shadow: 0px 1px 0px #ffffff;
  outline: 0;
}
.activebutton {
  box-shadow: inset 0px 1px 0px 0px #dcecfb;
  background: linear-gradient(to bottom, #bddbfa 5%, #80b5ea 100%);
  background-color: #bddbfa;
  border-radius: 6px;
  border: 1px solid #84bbf3;
  display: inline-block;
  cursor: pointer;
  color: #ffffff;
  font-family: Arial;
  font-size: 15px;
  font-weight: bold;
  padding: 6px 24px;
  text-decoration: none;
  text-shadow: 0px 1px 0px #528ecc;
  outline: 0;
}
Some files were not shown because too many files have changed in this diff.