Compare commits: 15 commits (10.0.0-nex ... starting)

| SHA1 |
|---|
| 2800683e64 |
| a1158a06bd |
| 35a376acdb |
| b8f0209ba5 |
| 9386bb1cb1 |
| 01592ff773 |
| 2a5d0dd59a |
| e07a2d10c7 |
| 7234d368e3 |
| cdc83730df |
| 0f30ce6e92 |
| fb6b8b22a1 |
| 853c24f4d4 |
| e37f58a228 |
| e569e0b1ee |
.bazelignore (97 lines)
@@ -1,97 +0,0 @@
# Bazel does not yet support wildcards or other .gitignore semantics for
# .bazelignore. Two issues for this feature request are outstanding:
# https://github.com/bazelbuild/bazel/issues/7093
# https://github.com/bazelbuild/bazel/issues/8106
.git
node_modules
dist
aio/content
aio/node_modules
aio/tools/examples/shared/node_modules
packages/bazel/node_modules
integration/bazel/bazel-bazel
integration/bazel/bazel-bin
integration/bazel/bazel-out
integration/bazel/bazel-testlogs
integration/bazel-schematics/demo
# All integration test node_modules folders
integration/bazel/node_modules
integration/bazel-schematics/node_modules
integration/cli-hello-world/node_modules
integration/cli-hello-world-ivy-compat/node_modules
integration/cli-hello-world-ivy-i18n/node_modules
integration/cli-hello-world-ivy-minimal/node_modules
integration/cli-hello-world-lazy/node_modules
integration/cli-hello-world-lazy-rollup/node_modules
integration/dynamic-compiler/node_modules
integration/hello_world__closure/node_modules
integration/hello_world__systemjs_umd/node_modules
integration/i18n/node_modules
integration/injectable-def/node_modules
integration/ivy-i18n/node_modules
integration/language_service_plugin/node_modules
integration/ng_elements/node_modules
integration/ng_elements_schematics/node_modules
integration/ng_update/node_modules
integration/ng_update_migrations/node_modules
integration/ngcc/node_modules
integration/platform-server/node_modules
integration/service-worker-schema/node_modules
integration/side-effects/node_modules
integration/terser/node_modules
integration/typings_test_ts36/node_modules
integration/typings_test_ts37/node_modules
# All integration test .yarn_local_cache folders
integration/bazel/.yarn_local_cache
integration/bazel-schematics/.yarn_local_cache
integration/cli-hello-world/.yarn_local_cache
integration/cli-hello-world-ivy-compat/.yarn_local_cache
integration/cli-hello-world-ivy-i18n/.yarn_local_cache
integration/cli-hello-world-ivy-minimal/.yarn_local_cache
integration/cli-hello-world-lazy/.yarn_local_cache
integration/cli-hello-world-lazy-rollup/.yarn_local_cache
integration/dynamic-compiler/.yarn_local_cache
integration/hello_world__closure/.yarn_local_cache
integration/hello_world__systemjs_umd/.yarn_local_cache
integration/i18n/.yarn_local_cache
integration/injectable-def/.yarn_local_cache
integration/ivy-i18n/.yarn_local_cache
integration/language_service_plugin/.yarn_local_cache
integration/ng_elements/.yarn_local_cache
integration/ng_elements_schematics/.yarn_local_cache
integration/ng_update/.yarn_local_cache
integration/ng_update_migrations/.yarn_local_cache
integration/ngcc/.yarn_local_cache
integration/platform-server/.yarn_local_cache
integration/service-worker-schema/.yarn_local_cache
integration/side-effects/.yarn_local_cache
integration/terser/.yarn_local_cache
integration/typings_test_ts36/.yarn_local_cache
integration/typings_test_ts37/.yarn_local_cache
# All integration test NPM_PACKAGE_MANIFEST.json folders
integration/bazel/NPM_PACKAGE_MANIFEST.json
integration/bazel-schematics/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-compat/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-minimal/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy-rollup/NPM_PACKAGE_MANIFEST.json
integration/dynamic-compiler/NPM_PACKAGE_MANIFEST.json
integration/hello_world__closure/NPM_PACKAGE_MANIFEST.json
integration/hello_world__systemjs_umd/NPM_PACKAGE_MANIFEST.json
integration/i18n/NPM_PACKAGE_MANIFEST.json
integration/injectable-def/NPM_PACKAGE_MANIFEST.json
integration/ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/language_service_plugin/NPM_PACKAGE_MANIFEST.json
integration/ng_elements/NPM_PACKAGE_MANIFEST.json
integration/ng_elements_schematics/NPM_PACKAGE_MANIFEST.json
integration/ng_update/NPM_PACKAGE_MANIFEST.json
integration/ng_update_migrations/NPM_PACKAGE_MANIFEST.json
integration/ngcc/NPM_PACKAGE_MANIFEST.json
integration/platform-server/NPM_PACKAGE_MANIFEST.json
integration/service-worker-schema/NPM_PACKAGE_MANIFEST.json
integration/side-effects/NPM_PACKAGE_MANIFEST.json
integration/terser/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts36/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts37/NPM_PACKAGE_MANIFEST.json
.bazelrc (155 lines)
@@ -1,155 +0,0 @@
# Enable debugging tests with --config=debug
test:debug --test_arg=--node_options=--inspect-brk --test_output=streamed --test_strategy=exclusive --test_timeout=9999 --nocache_test_results

###############################
# Filesystem interactions #
###############################

# Create symlinks in the project:
# - dist/bin for outputs
# - dist/testlogs, dist/genfiles
# - bazel-out
# NB: bazel-out should be excluded from the editor configuration.
# The checked-in /.vscode/settings.json does this for VSCode.
# Other editors may require manual config to ignore this directory.
# In the past, we saw a problem where VSCode traversed a massive tree, opening file handles and
# eventually a surprising failure with auto-discovery of the C++ toolchain in
# MacOS High Sierra.
# See https://github.com/bazelbuild/bazel/issues/4603
build --symlink_prefix=dist/

# Turn off legacy external runfiles
build --nolegacy_external_runfiles
run --nolegacy_external_runfiles
test --nolegacy_external_runfiles

# Turn on --incompatible_strict_action_env which was on by default
# in Bazel 0.21.0 but turned off again in 0.22.0. Follow
# https://github.com/bazelbuild/bazel/issues/7026 for more details.
# This flag is needed so that the bazel cache is not invalidated
# when running bazel via `yarn bazel`.
# See https://github.com/angular/angular/issues/27514.
build --incompatible_strict_action_env
run --incompatible_strict_action_env
test --incompatible_strict_action_env

# Do not build runfile trees by default. If an execution strategy relies on runfile
# symlink trees, the tree is created on-demand. See: https://github.com/bazelbuild/bazel/issues/6627
# and https://github.com/bazelbuild/bazel/commit/03246077f948f2790a83520e7dccc2625650e6df
build --nobuild_runfile_links

###############################
# Release support #
# Turn on these settings with #
# --config=release #
###############################

# Releases should always be stamped with version control info
# This command assumes node on the path and is a workaround for
# https://github.com/bazelbuild/bazel/issues/4802
build:release --workspace_status_command="yarn -s ng-dev release build-env-stamp"
build:release --stamp

###############################
# Output #
###############################

# A more useful default output mode for bazel query
# Prints eg. "ng_module rule //foo:bar" rather than just "//foo:bar"
query --output=label_kind

# By default, failing tests don't print any output, it goes to the log file
test --test_output=errors

################################
# Settings for CircleCI #
################################

# Bazel flags for CircleCI are in /.circleci/bazel.linux.rc and /.circleci/bazel.windows.rc

##################################
# Settings for integration tests #
##################################

# Trick bazel into treating BUILD files under integration/bazel as being regular files
# This lets us glob() up all the files inside this integration test to make them inputs to tests
# (Note, we cannot use common --deleted_packages because the bazel version command doesn't support it)
build --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e
query --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e

################################
# Temporary Settings for Ivy #
################################
# To determine if the compiler used should be Ivy instead of ViewEngine, one can use `--config=ivy`
# on any bazel target. This is a temporary flag until codebase is permanently switched to Ivy.
build --define=angular_ivy_enabled=False

build:view-engine --define=angular_ivy_enabled=False
build:ivy --define=angular_ivy_enabled=True

##################################
# Remote Build Execution support #
# Turn on these settings with #
# --config=remote #
##################################

# The following --define=EXECUTOR=remote will be able to be removed
# once https://github.com/bazelbuild/bazel/issues/7254 is fixed
build:remote --define=EXECUTOR=remote

# Set a higher timeout value, just in case.
build:remote --remote_timeout=600

# Increase the default number of jobs by 50% because our build has lots of
# parallelism
build:remote --jobs=150
build:remote --google_default_credentials

# Force remote executions to consider the entire run as linux
build:remote --cpu=k8
build:remote --host_cpu=k8

# Toolchain and platform related flags
build:remote --host_javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --crosstool_top=@rbe_ubuntu1604_angular//cc:toolchain
build:remote --extra_toolchains=@rbe_ubuntu1604_angular//config:cc-toolchain
build:remote --extra_execution_platforms=//tools:rbe_ubuntu1604-angular
build:remote --host_platform=//tools:rbe_ubuntu1604-angular
build:remote --platforms=//tools:rbe_ubuntu1604-angular

# Remote instance and caching
build:remote --remote_instance_name=projects/internal-200822/instances/default_instance
build:remote --project_id=internal-200822
build:remote --remote_cache=remotebuildexecution.googleapis.com
build:remote --remote_executor=remotebuildexecution.googleapis.com

##################################
# Saucelabs tests settings #
# Turn on these settings with #
# --config=saucelabs #
##################################

# For saucelabs tests we don't want to enable flaky test attempts. Karma has its own integrated
# retry mechanism and we do not want to retry unnecessarily if Karma already tried multiple times.
test:saucelabs --flaky_test_attempts=1

###############################
# NodeJS rules settings
# These settings are required for rules_nodejs
###############################

# Turn on managed directories feature in Bazel
# This allows us to avoid installing a second copy of node_modules
common --experimental_allow_incremental_repository_updates

####################################################
# User bazel configuration
# NOTE: This needs to be the *last* entry in the config.
####################################################

# Load any settings which are specific to the current user. Needs to be *last* statement
# in this config, as the user configuration should be able to overwrite flags from this file.
try-import .bazelrc.user
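(A minimal usage sketch of the named configs defined above, assuming commands are run from the repo root via the `yarn bazel` wrapper mentioned in the comments; the target labels are illustrative, not taken from this diff:)

# debug a single test target interactively, per the --config=debug comment above
yarn bazel test --config=debug //packages/core/test:test
# build with the Ivy compiler instead of ViewEngine
yarn bazel build --config=ivy //packages/core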
.bazelversion (3 lines)
@@ -1,3 +0,0 @@
2.1.1
# [NB: this comment has to be after the first line, see https://github.com/bazelbuild/bazelisk/issues/117]
# When updating the Bazel version you also need to update the RBE toolchains version in package.bzl
@@ -1,19 +0,0 @@
# Encryption

Based on https://github.com/circleci/encrypted-files

In the CircleCI web UI, we have a secret variable called `KEY`
https://circleci.com/gh/angular/angular/edit#env-vars
which is only exposed to non-fork builds
(see "Pass secrets to builds from forked pull requests" under
https://circleci.com/gh/angular/angular/edit#advanced-settings)

We use this as a symmetric AES encryption key to encrypt tokens like
a GitHub token that enables publishing snapshots.

To create the github_token file, we take this approach:
- Find the angular-builds:token in http://valentine
- Go inside the CircleCI default docker image so you use the same version of openssl as we will at runtime: `docker run --rm -it circleci/node:10.12`
- echo "https://[token]:@github.com" > credentials
- openssl aes-256-cbc -e -in credentials -out .circleci/github_token -k $KEY
- If needed, base64-encode the result so you can copy-paste it out of docker: `base64 github_token`
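(For reference, the matching decryption step lives in the `publish_snapshot` job of the CircleCI config shown later in this diff. A minimal sketch of the round trip, assuming `$KEY` holds the shared secret; note the decrypt side pins `-md md5` because openssl versions differ in their default digest:)

# encrypt locally, inside circleci/node:10.12, per the steps above
openssl aes-256-cbc -e -in credentials -out .circleci/github_token -k $KEY
# decrypt on CI, as done in the publish_snapshot job
openssl aes-256-cbc -d -in .circleci/github_token -md md5 -k "${KEY}" -out ~/.git_credentials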
.circleci/bazel.common.rc (15 lines)
@@ -1,15 +0,0 @@
# Settings in this file should be OS agnostic. Use the bazel.<OS>.rc files for OS specific settings.

# Don't be spammy in the logs
build --noshow_progress

# Print all the options that apply to the build.
# This helps us diagnose which options override others
# (e.g. /etc/bazel.bazelrc vs. tools/bazel.rc)
build --announce_rc

# Retry in the event of flakes, eg. https://circleci.com/gh/angular/angular/31309
test --flaky_test_attempts=2

# More details on failures
build --verbose_failures=true
.circleci/bazel.linux.rc (22 lines)
@@ -1,22 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to /etc/bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=/home/circleci/bazel_repository_cache

# Workaround https://github.com/bazelbuild/bazel/issues/3645
# Bazel doesn't calculate the memory ceiling correctly when running under Docker.
# Limit Bazel to consuming resources that fit in CircleCI "xlarge" class
# https://circleci.com/docs/2.0/configuration-reference/#resource_class
build --local_cpu_resources=8
build --local_ram_resources=14336

# All builds executed remotely should be done using our RBE configuration.
build:remote --google_default_credentials
build --config=remote
.circleci/bazel.windows.rc (21 lines)
@@ -1,21 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to $env:ProgramData\bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=C:/Users/circleci/bazel_repository_cache

# Manually set the local resources used in windows CI runs
build --local_ram_resources=13500
build --local_cpu_resources=4

# All windows jobs run on master and should use http caching
build --remote_http_cache=https://storage.googleapis.com/angular-team-cache
build --remote_accept_cached=true
build --remote_upload_local_results=true
build --google_default_credentials
.circleci/config.yml (903 lines)
@@ -1,903 +0,0 @@
# Configuration file for https://circleci.com/gh/angular/angular

# Note: YAML anchors allow an object to be re-used, reducing duplication.
# The ampersand declares an alias for an object, then later the `<<: *name`
# syntax dereferences it.
# See http://blog.daemonl.com/2016/02/yaml.html
# To validate changes, use an online parser, eg.
# http://yaml-online-parser.appspot.com/

# CircleCI configuration version
# Version 2.1 allows for extra config reuse features
# https://circleci.com/docs/2.0/reusing-config/#getting-started-with-config-reuse
version: 2.1

# We don't want to include the current branch name in the cache key because that would prevent
# PRs from being able to restore the cache since the branch names are always different for PRs.
# The cache key should only consist of dynamic values that change whenever something in the
# cache changes. For example:
# 1) yarn lock file changes --> cached "node_modules" are different.
# 2) bazel repository definitions change --> cached bazel repositories are different.
# Windows needs its own cache key because binaries in node_modules are different.
# **NOTE 1**: If you change the cache key prefix, also sync the cache_key_fallback to match.
# **NOTE 2**: Keep the static part of the cache key as prefix to enable correct fallbacks.
# See https://circleci.com/docs/2.0/caching/#restoring-cache for how prefixes work in CircleCI.
var_3: &cache_key v7-angular-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
# We invalidate the cache if the Bazel version changes because otherwise the `bazelisk` cache
# folder will contain all previously used versions and ultimately cause the cache restoring to
# be slower due to its growing size.
var_4: &cache_key_fallback v7-angular-node-12-{{ checksum ".bazelversion" }}
var_3_win: &cache_key_win v7-angular-win-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4_win: &cache_key_win_fallback v7-angular-win-node-12-{{ checksum ".bazelversion" }}

# Cache key for the `components-repo-unit-tests` job. **Note** when updating the SHA in the
# cache keys also update the SHA for the "COMPONENTS_REPO_COMMIT" environment variable.
var_5: &components_repo_unit_tests_cache_key v7-angular-components-448523bffffecd2b53a3d2854c3051b6b7a3934f
var_6: &components_repo_unit_tests_cache_key_fallback v7-angular-components-

# Workspace initially persisted by the `setup` job, and then enhanced by `build-npm-packages` and
# `build-ivy-npm-packages`.
# https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
# https://circleci.com/blog/deep-diving-into-circleci-workspaces/
var_7: &workspace_location ~/

# Filter to run a job on builds for pull requests only.
var_8: &only_on_pull_requests
  filters:
    branches:
      only:
        - /pull\/\d+/

# Filter to skip a job on builds for pull requests.
var_9: &skip_on_pull_requests
  filters:
    branches:
      ignore:
        - /pull\/\d+/

# Filter to run a job on builds for the master branch only.
var_10: &only_on_master
  filters:
    branches:
      only:
        - master

# Executor Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-executors
# **NOTE 1**: Pin to exact images using an ID (SHA). See https://circleci.com/docs/2.0/circleci-images/#using-a-docker-image-id-to-pin-an-image-to-a-fixed-version.
# (Using the tag is not necessary when pinning by ID, but include it anyway for documentation purposes.)
# **NOTE 2**: If you change the version of the docker images, also change the `cache_key` suffix.
# **NOTE 3**: If you change the version of the `*-browsers` docker image, make sure the
# `--versions.chrome` arg in `integration/bazel-schematics/test.sh` specifies a
# ChromeDriver version that is compatible with the Chrome version in the image.
executors:
  default-executor:
    parameters:
      resource_class:
        type: string
        default: medium
    docker:
      - image: circleci/node:12.14.1@sha256:f9de24fc0017059cc42ef7d07db060008af65a98b1f0cdd1ef3339213226bf6d
    resource_class: << parameters.resource_class >>
    working_directory: ~/ng

  windows-executor:
    working_directory: ~/ng
    resource_class: windows.medium
    # CircleCI windows VMs do have the GitBash shell available:
    # https://github.com/CircleCI-Public/windows-preview-docs#shells
    # But in this specific case we really should not use it because Bazel must not be run from
    # GitBash. These issues discuss why:
    # https://github.com/bazelbuild/bazel/issues/5751
    # https://github.com/bazelbuild/bazel/issues/5724#issuecomment-410194038
    # https://github.com/bazelbuild/bazel/issues/6339#issuecomment-441600879
    shell: powershell.exe -ExecutionPolicy Bypass
    machine:
      # Windows preview image that includes the following:
      # - Visual Studio 2019 build tools
      # - Node 12
      # - yarn 1.17
      # - Python 3 3.7.4
      image: windows-server-2019-vs2019:201908-02
# Command Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-commands
commands:
  custom_attach_workspace:
    description: Attach workspace at a predefined location
    steps:
      - attach_workspace:
          at: *workspace_location

  # Install shared libs used by Chrome that is either provisioned by
  # rules_webtesting or by puppeteer.
  install_chrome_libs:
    description: Install shared Chrome libs
    steps:
      - run:
          name: Install shared Chrome libs
          command: |
            sudo apt-get update
            # Install GTK+ graphical user interface (libgtk-3-0), advanced linux sound architecture (libasound2)
            # and network security service libraries (libnss3) & X11 Screen Saver extension library (libxss1)
            # which are dependencies of chrome & needed for karma & protractor headless chrome tests.
            # This is a very small install which takes around 7s compared to using the full
            # circleci/node:x.x.x-browsers image.
            sudo apt-get -y install libgtk-3-0 libasound2 libnss3 libxss1

  # Install java runtime which is required by some integration tests such as
  # //integration:hello_world__closure_test, //integration:i18n_test and
  # //integration:ng_elements_test to run the closure compiler
  install_java:
    description: Install java
    steps:
      - run:
          name: Install java
          command: |
            sudo apt-get update
            # Install java runtime
            sudo apt-get install default-jre

  # Initializes the CI environment by setting up common environment variables.
  init_environment:
    description: Initializing environment (setting up variables)
    steps:
      - run:
          name: Set up environment
          environment:
            CIRCLE_GIT_BASE_REVISION: << pipeline.git.base_revision >>
            CIRCLE_GIT_REVISION: << pipeline.git.revision >>
          command: ./.circleci/env.sh
      - run:
          # Configure git as the CircleCI `checkout` command does.
          # This is needed because we only checkout on the setup job.
          # Add GitHub to known hosts
          name: Configure git
          command: |
            mkdir -p ~/.ssh
            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' >> ~/.ssh/known_hosts
            git config --global url."ssh://git@github.com".insteadOf "https://github.com" || true
            git config --global gc.auto 0 || true

  init_saucelabs_environment:
    description: Sets up a domain that resolves to the local host.
    steps:
      - run:
          name: Preparing environment for running tests on Saucelabs.
          command: |
            # For SauceLabs jobs, we set up a domain which resolves to the machine which launched
            # the tunnel. We do this because devices are sometimes not able to properly resolve
            # `localhost` or `127.0.0.1` through the SauceLabs tunnel. Using a domain that does not
            # resolve to anything on SauceLabs VMs ensures that such requests are always resolved
            # through the tunnel, and resolve to the actual tunnel host machine (i.e. the CircleCI VM).
            # More context can be found in: https://github.com/angular/angular/pull/35171.
            setPublicVar SAUCE_LOCALHOST_ALIAS_DOMAIN "angular-ci.local"
            setSecretVar SAUCE_ACCESS_KEY $(echo $SAUCE_ACCESS_KEY | rev)
      - run:
          # Sets up a local domain in the machine's host file that resolves to the local
          # host. This domain is helpful in Saucelabs tests where devices are not able to
          # properly resolve `localhost` or `127.0.0.1` through the sauce-connect tunnel.
          name: Setting up alias domain for local host.
          command: echo "127.0.0.1 $SAUCE_LOCALHOST_ALIAS_DOMAIN" | sudo tee -a /etc/hosts

  # Normally this would be an individual job instead of a command.
  # But startup and setup time for each individual windows job are high enough to discourage
  # many small jobs, so instead we use a command for setup unless the gain becomes significant.
  setup_win:
    description: Setup windows node environment
    steps:
      # Use the Linux workspace directly, as it already has checkout, rebased and node modules.
      - custom_attach_workspace
      # Install Bazel pre-requisites that aren't in the preconfigured CircleCI Windows VM.
      - run: ./.circleci/windows-env.ps1
      - run: node --version
      - run: yarn --version
      - restore_cache:
          keys:
            - *cache_key_win
            - *cache_key_win_fallback
      # Reinstall to get windows binaries.
      - run: yarn install --frozen-lockfile --non-interactive
      # Install @bazel/bazelisk globally and use that for the first run.
      # Workaround for https://github.com/bazelbuild/rules_nodejs/issues/894
      # NB: the issue was for @bazel/bazel but the same problem applies to @bazel/bazelisk
      - run: yarn global add @bazel/bazelisk@$env:BAZELISK_VERSION
      - run: bazelisk info

  notify_webhook_on_fail:
    description: Notify a webhook about failure
    parameters:
      # `webhook_url_env_var` is a secret env var defined in CircleCI project settings.
      # The URLs come from https://angular-team.slack.com/apps/A0F7VRE7N-circleci.
      webhook_url_env_var:
        type: env_var_name
    steps:
      - run:
          when: on_fail
          command: |
            notificationJson="{\"text\":\":x: \`$CIRCLE_JOB\` job for $CIRCLE_BRANCH branch failed on build $CIRCLE_BUILD_NUM: $CIRCLE_BUILD_URL :scream:\"}"
            curl --request POST --header "Content-Type: application/json" --data "$notificationJson" ${<< parameters.webhook_url_env_var >>}
# Job definitions
# Jobs can include parameters that are passed in the workflow job invocation.
# https://circleci.com/docs/2.0/reusing-config/#authoring-parameterized-jobs
jobs:
  setup:
    executor: default-executor
    steps:
      - checkout
      - init_environment
      - run:
          name: Rebase PR on target branch
          # After checkout, rebase on top of target branch.
          command: >
            if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
              # User is required for rebase.
              git config user.name "angular-ci"
              git config user.email "angular-ci"
              # Rebase PR on top of target branch.
              node tools/rebase-pr.js
            else
              echo "This build is not over a PR, nothing to do."
            fi
      # This cache is saved in the build-npm-packages so that Bazel cache is also included.
      - restore_cache:
          keys:
            - *cache_key
            - *cache_key_fallback
      - run:
          name: Running Yarn install
          command: yarn install --frozen-lockfile --non-interactive
          # Yarn's requests sometimes take more than 10mins to complete.
          no_output_timeout: 45m
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      # Make the bazel directories and add a file to them if they don't exist already so that
      # persist_to_workspace does not fail.
      - run: |
          if [ ! -d ~/bazel_repository_cache ]; then
            mkdir ~/bazel_repository_cache
            touch ~/bazel_repository_cache/MARKER
          fi
      # Persist any changes at this point to be reused by further jobs.
      # **NOTE**: To add new content to the workspace, always persist on the same root.
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ./ng
            - ./bazel_repository_cache

  lint:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment

      - run: yarn -s tslint
      - run: yarn -s ng-dev format changed $CI_GIT_BASE_REVISION --check
      - run: yarn -s ts-circular-deps:check
      - run: yarn -s ng-dev pullapprove verify
      - run: yarn -s ng-dev commit-message validate-range --range $CI_COMMIT_RANGE

  test:
    executor:
      name: default-executor
      # Now that large integration tests are running locally in parallel (they can't run on RBE yet
      # as they require network access for yarn install), this test is running out of memory
      # consistently with the xlarge machine.
      # TODO: switch back to xlarge once integration tests are running on remote-exec
      resource_class: 2xlarge+
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - install_java
      - run:
          command: yarn bazel test //... --build_tag_filters=-ivy-only --test_tag_filters=-ivy-only
          no_output_timeout: 20m

  # Temporary job to test what will happen when we flip the Ivy flag to true
  test_ivy_aot:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # We need to explicitly specify the --symlink_prefix option because otherwise we would
      # not be able to easily find the output bin directory when uploading artifacts for size
      # measurements.
      - run:
          command: yarn test-ivy-aot //... --symlink_prefix=dist/
          no_output_timeout: 20m

      # Publish bundle artifacts which will be used to calculate the size change. **Note**: Make
      # sure that the size plugin from the Angular robot fetches the artifacts from this CircleCI
      # job (see .github/angular-robot.yml). Additionally any artifacts need to be stored with the
      # following path format: "{projectName}/{context}/{fileName}". This format is necessary
      # because otherwise the bot is not able to pick up the artifacts from CircleCI. See:
      # https://github.com/angular/github-robot/blob/master/functions/src/plugins/size.ts#L392-L394
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/hello_world/bundle.min.js
          destination: core/hello_world/bundle
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/todo/bundle.min.js
          destination: core/todo/bundle
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/hello_world/bundle.min.js.br
          destination: core/hello_world/bundle.br
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/todo/bundle.min.js.br
          destination: core/todo/bundle.br

  # NOTE: This is currently limited to master builds only. See the `monitoring` configuration.
  saucelabs_view_engine:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Run Bazel tests on Saucelabs with ViewEngine
          # See /tools/saucelabs/README.md for more info
          command: |
            yarn bazel run //tools/saucelabs:sauce_service_setup
            TESTS=$(./node_modules/.bin/bazelisk query --output label '(kind(karma_web_test, ...) intersect attr("tags", "saucelabs", ...)) except attr("tags", "ivy-only", ...) except attr("tags", "fixme-saucelabs-ve", ...)')
            yarn bazel test --config=saucelabs ${TESTS}
            yarn bazel run //tools/saucelabs:sauce_service_stop
          no_output_timeout: 40m
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  # NOTE: This is currently limited to master builds only. See the `monitoring` configuration.
  saucelabs_ivy:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Run Bazel tests on Saucelabs with Ivy
          # See /tools/saucelabs/README.md for more info
          command: |
            yarn bazel run //tools/saucelabs:sauce_service_setup
            TESTS=$(./node_modules/.bin/bazelisk query --output label '(kind(karma_web_test, ...) intersect attr("tags", "saucelabs", ...)) except attr("tags", "no-ivy-aot", ...) except attr("tags", "fixme-saucelabs-ivy", ...)')
            yarn bazel test --config=saucelabs --config=ivy ${TESTS}
            yarn bazel run //tools/saucelabs:sauce_service_stop
          no_output_timeout: 40m
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL
  test_aio:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Build aio
      - run: yarn --cwd aio build --progress=false
      # Lint the code
      - run: yarn --cwd aio lint
      # Run unit tests
      - run: yarn --cwd aio test --progress=false --watch=false
      # Run e2e tests
      - run: yarn --cwd aio e2e --configuration=ci
      # Run PWA-score tests
      - run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE
      # Run accessibility tests
      - run: yarn --cwd aio test-a11y-score-localhost
      # Check the bundle sizes.
      - run: yarn --cwd aio payload-size
      # Run unit tests for Firebase redirects
      - run: yarn --cwd aio redirects-test

  deploy_aio:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Deploy angular.io to production (if necessary)
      - run: setPublicVar_CI_STABLE_BRANCH
      - run: yarn --cwd aio deploy-production

  test_aio_local:
    parameters:
      viewengine:
        type: boolean
        default: false
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Build aio (with local Angular packages)
      - run: yarn --cwd aio build-local<<# parameters.viewengine >>-with-viewengine<</ parameters.viewengine >>-ci
      # Run unit tests
      - run: yarn --cwd aio test --progress=false --watch=false
      # Run e2e tests
      - run: yarn --cwd aio e2e --configuration=ci
      # Run PWA-score tests
      - run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE
      # Check the bundle sizes.
      - run: yarn --cwd aio payload-size aio-local<<# parameters.viewengine >>-viewengine<</ parameters.viewengine >>

  test_aio_tools:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      # Install
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      - run: yarn --cwd aio extract-cli-command-docs
      # Run tools tests
      - run: yarn --cwd aio tools-test
      - run: ./aio/aio-builds-setup/scripts/test.sh

  test_docs_examples:
    parameters:
      viewengine:
        type: boolean
        default: false
    executor:
      name: default-executor
      resource_class: xlarge
    parallelism: 5
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Install aio
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      # Run examples tests. The "CIRCLE_NODE_INDEX" will be set if "parallelism" is enabled.
      # Since the parallelism is set to "5", there will be five parallel CircleCI containers,
      # with either "0", "1", etc. as node index. This can be passed to the "--shard" argument.
      - run: yarn --cwd aio example-e2e --setup --local <<# parameters.viewengine >>--viewengine<</ parameters.viewengine >> --cliSpecsConcurrency=5 --shard=${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL} --retry 2

  # This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
  aio_preview:
    executor: default-executor
    environment:
      AIO_SNAPSHOT_ARTIFACT_PATH: &aio_preview_artifact_path 'aio/tmp/snapshot.tgz'
    steps:
      - custom_attach_workspace
      - init_environment
      - run: ./aio/scripts/build-artifacts.sh $AIO_SNAPSHOT_ARTIFACT_PATH $CI_PULL_REQUEST $CI_COMMIT
      - store_artifacts:
          path: *aio_preview_artifact_path
          # The `destination` needs to be kept in sync with the value of
          # `AIO_ARTIFACT_PATH` in `aio/aio-builds-setup/Dockerfile`
          destination: aio/dist/aio-snapshot.tgz
      - run: node ./aio/scripts/create-preview $CIRCLE_BUILD_NUM

  # This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
  test_aio_preview:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      - run:
          name: Wait for preview and run tests
          command: node aio/scripts/test-preview.js $CI_PULL_REQUEST $CI_COMMIT $CI_AIO_MIN_PWA_SCORE


  # The `build-npm-packages` tasks exist for backwards-compatibility with old scripts and
  # tests that rely on the pre-Bazel `dist/packages-dist` output structure (build.sh).
  # Having multiple jobs that independently build in this manner duplicates some work; we build
  # the bazel packages more than once. Even though we have a remote cache, these jobs will
  # typically run in parallel so up-to-date outputs will not be available at the time the build
  # starts.

  # Build the view engine npm packages. No new jobs should depend on this.
  build-npm-packages:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - run: node scripts/build/build-packages-dist.js

      # Save the npm packages from //packages/... for other workflow jobs to read
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ng/dist/packages-dist
            - ng/dist/zone.js-dist

      # Save dependencies and bazel repository cache to use on subsequent runs.
      - save_cache:
          key: *cache_key
          paths:
            - "node_modules"
            - "aio/node_modules"
            - "~/bazel_repository_cache"
            - "~/.cache/bazelisk"

  # Build the ivy npm packages.
  build-ivy-npm-packages:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - run: node scripts/build/build-ivy-npm-packages.js

      # Save the npm packages from //packages/... for other workflow jobs to read
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ng/dist/packages-dist-ivy-aot
            - ng/dist/zone.js-dist-ivy-aot
  # This job creates compressed tarballs (`.tgz` files) for all Angular packages and stores them as
  # build artifacts. This makes it easy to try out changes from a PR build for testing purposes.
  # More info on CircleCI build artifacts: https://circleci.com/docs/2.0/artifacts
  #
  # NOTE: Currently, this job only runs for PR builds. See `publish_snapshot` for non-PR builds.
  publish_packages_as_artifacts:
    executor: default-executor
    environment:
      NG_PACKAGES_DIR: &ng_packages_dir 'dist/packages-dist'
      NG_PACKAGES_ARCHIVES_DIR: &ng_packages_archives_dir 'dist/packages-dist-archives'
      ZONEJS_PACKAGES_DIR: &zonejs_packages_dir 'dist/zone.js-dist'
      ZONEJS_PACKAGES_ARCHIVES_DIR: &zonejs_packages_archives_dir 'dist/zone.js-dist-archives'
    steps:
      - custom_attach_workspace
      - init_environment
      # Publish `@angular/*` packages.
      - run:
          name: Create artifacts for @angular/* packages
          command: ./scripts/ci/create-package-archives.sh $CI_BRANCH $CI_COMMIT $NG_PACKAGES_DIR $NG_PACKAGES_ARCHIVES_DIR
      - store_artifacts:
          path: *ng_packages_archives_dir
          destination: angular
      # Publish `zone.js` package.
      - run:
          name: Create artifacts for zone.js package
          command: ./scripts/ci/create-package-archives.sh $CI_BRANCH $CI_COMMIT $ZONEJS_PACKAGES_DIR $ZONEJS_PACKAGES_ARCHIVES_DIR
      - store_artifacts:
          path: *zonejs_packages_archives_dir
          destination: zone.js

  # This job updates the content of repos like github.com/angular/core-builds
  # for every green build on angular/angular.
  publish_snapshot:
    executor: default-executor
    steps:
      # See below - ideally this job should not trigger for non-upstream builds.
      # But since it does, we have to check this condition.
      - run:
          name: Skip this job for Pull Requests and Fork builds
          # Note: Using `CIRCLE_*` env variables (instead of those defined in `env.sh`) so that this
          # step can be run before `init_environment`.
          command: >
            if [[ -n "${CIRCLE_PR_NUMBER}" ]] ||
               [[ "$CIRCLE_PROJECT_USERNAME" != "angular" ]] ||
               [[ "$CIRCLE_PROJECT_REPONAME" != "angular" ]]; then
              circleci step halt
            fi
      - custom_attach_workspace
      - init_environment
      # CircleCI has a config setting to force SSH for all github connections
      # This is not compatible with our mechanism of using a Personal Access Token
      # Clear the global setting
      - run: git config --global --unset "url.ssh://git@github.com.insteadof"
      - run:
          name: Decrypt github credentials
          # We need to ensure that the same default digest is used for encoding and decoding with
          # openssl. Openssl versions might have different default digests which can cause
          # decryption failures based on the installed openssl version. https://stackoverflow.com/a/39641378/4317734
          command: 'openssl aes-256-cbc -d -in .circleci/github_token -md md5 -k "${KEY}" -out ~/.git_credentials'
      - run: ./scripts/ci/publish-build-artifacts.sh

  aio_monitoring_stable:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run: setPublicVar_CI_STABLE_BRANCH
      - run:
          name: Check out `aio/` and yarn from the stable branch
          command: |
            git fetch origin $CI_STABLE_BRANCH
            git checkout --force origin/$CI_STABLE_BRANCH -- aio/ .yarn/ .yarnrc
      # Ignore yarn's engines check, because we checked out `aio/package.json` from the stable
      # branch and there could be a node version skew, which is acceptable in this monitoring job.
      - run: yarn config set ignore-engines true
      - run:
          name: Run tests against https://angular.io/
          command: ./aio/scripts/test-production.sh https://angular.io/ $CI_AIO_MIN_PWA_SCORE
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_CARETAKER_WEBHOOK_URL
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  aio_monitoring_next:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run:
          name: Run tests against https://next.angular.io/
          command: ./aio/scripts/test-production.sh https://next.angular.io/ $CI_AIO_MIN_PWA_SCORE
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_CARETAKER_WEBHOOK_URL
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  legacy-unit-tests-saucelabs:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Starting Saucelabs tunnel service
          command: ./tools/saucelabs/sauce-service.sh run
          background: true
      - run: yarn tsc -p packages
      - run: yarn tsc -p modules
      - run: yarn bazel build //packages/zone.js:npm_package
      - run:
          # Waiting on ready ensures that we don't run tests before Saucelabs is ready.
          name: Waiting for Saucelabs tunnel to connect
          command: ./tools/saucelabs/sauce-service.sh ready-wait
      - run:
          name: Running tests on Saucelabs.
          command: |
            browsers=$(node -e 'console.log(require("./browser-providers.conf").sauceAliases.CI_REQUIRED.join(","))')
            yarn karma start ./karma-js.conf.js --single-run --browsers=${browsers}
      - run:
          name: Stop Saucelabs tunnel service
          command: ./tools/saucelabs/sauce-service.sh stop

  # Job that runs all unit tests of the `angular/components` repository.
  components-repo-unit-tests:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      # Restore the cache before cloning the repository because the clone script re-uses
      # the restored repository if present. This reduces the number of times the components
      # repository needs to be cloned (this is slow and increases based on commits in the repo).
      - restore_cache:
          keys:
            - *components_repo_unit_tests_cache_key
            # Whenever the `angular/components` SHA is updated, the cache key will no longer
            # match. The fallback cache will still match, and CircleCI will restore the most
            # recently cached repository folder. Without the fallback cache, we'd need to download
            # the repository from scratch and it would slow down the job. This is because we can't
            # clone the repository with reduced `--depth`, but rather need to clone the whole
            # repository to be able to support arbitrary SHAs.
            - *components_repo_unit_tests_cache_key_fallback
      - run:
          name: "Fetching angular/components repository"
          command: ./scripts/ci/clone_angular_components_repo.sh
      - run:
          # Run yarn install to fetch the Bazel binaries as used in the components repo.
          name: Installing dependencies.
          # TODO: remove this once the repo has been updated to use NodeJS v12 and Yarn 1.19.1.
          # We temporarily ignore the "engines" because the Angular components repository has
          # minimum dependency on NodeJS v12 and Yarn 1.19.1, but the framework repository uses
          # older versions.
          command: yarn --ignore-engines --cwd ${COMPONENTS_REPO_TMP_DIR} install --frozen-lockfile --non-interactive
      - save_cache:
          key: *components_repo_unit_tests_cache_key
          paths:
            # Temporary directory must be kept in sync with the `$COMPONENTS_REPO_TMP_DIR` env
            # variable. It needs to be hardcoded here, because env variables interpolation is
            # not supported.
            - "/tmp/angular-components-repo"
      - run:
          # Updates the `angular/components` `package.json` file to refer to the release output
          # inside the `packages-dist` directory. Note that it's not necessary to perform a yarn
          # install as Bazel runs Yarn automatically when needed.
          name: Setting up release packages.
          command: node scripts/ci/update-deps-to-dist-packages.js ${COMPONENTS_REPO_TMP_DIR}/package.json dist/packages-dist/
      - run:
          name: "Running `angular/components` unit tests"
          command: ./scripts/ci/run_angular_components_unit_tests.sh

  test_zonejs:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      # Install
      - run: yarn --cwd packages/zone.js install --frozen-lockfile --non-interactive
      # Run zone.js tools tests
      - run: yarn --cwd packages/zone.js promisetest
      - run: yarn --cwd packages/zone.js promisefinallytest
      - run: yarn bazel build //packages/zone.js:npm_package &&
          cp dist/bin/packages/zone.js/npm_package/dist/zone-mix.js ./packages/zone.js/test/extra/ &&
          cp dist/bin/packages/zone.js/npm_package/dist/zone-patch-electron.js ./packages/zone.js/test/extra/ &&
          yarn --cwd packages/zone.js electrontest
      - run: yarn --cwd packages/zone.js jesttest
# Windows jobs
|
||||
# Docs: https://circleci.com/docs/2.0/hello-world-windows/
|
||||
test_win:
|
||||
executor: windows-executor
|
||||
steps:
|
||||
- setup_win
|
||||
- run:
|
||||
# Ran into a command parsing problem where `-browser:chromium-local` was converted to
|
||||
# `-browser: chromium-local` (a space was added) in https://circleci.com/gh/angular/angular/357511.
|
||||
# Probably a powershell command parsing thing. There's no problem using a yarn script though.
|
||||
command: yarn circleci-win-ve
|
||||
no_output_timeout: 45m
|
||||
# Save bazel repository cache to use on subsequent runs.
|
||||
# We don't save node_modules because it's faster to use the linux workspace and reinstall.
|
||||
- save_cache:
|
||||
key: *cache_key_win
|
||||
paths:
|
||||
- "C:/Users/circleci/bazel_repository_cache"
|
||||
|
||||
test_ivy_aot_win:
|
||||
executor: windows-executor
|
||||
steps:
|
||||
- setup_win
|
||||
- run:
|
||||
command: yarn circleci-win-ivy
|
||||
no_output_timeout: 45m
|
||||
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
default_workflow:
|
||||
jobs:
|
||||
- setup:
|
||||
filters:
|
||||
branches:
|
||||
ignore: g3
|
||||
- lint:
|
||||
requires:
|
||||
- setup
|
||||
- test:
|
||||
requires:
|
||||
- setup
|
||||
- test_ivy_aot:
|
||||
requires:
|
||||
- setup
|
||||
- build-npm-packages:
|
||||
requires:
|
||||
- setup
|
||||
- build-ivy-npm-packages:
|
||||
requires:
|
||||
- setup
|
||||
- legacy-unit-tests-saucelabs:
|
||||
requires:
|
||||
- setup
|
||||
- test_aio:
|
||||
requires:
|
||||
- setup
|
||||
- deploy_aio:
|
||||
requires:
|
||||
- test_aio
|
||||
- test_aio_local:
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- test_aio_local:
|
||||
name: test_aio_local_viewengine
|
||||
viewengine: true
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- test_aio_tools:
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- test_docs_examples:
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- test_docs_examples:
|
||||
name: test_docs_examples_viewengine
|
||||
viewengine: true
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- aio_preview:
|
||||
# Only run on PR builds. (There can be no previews for non-PR builds.)
|
||||
<<: *only_on_pull_requests
|
||||
requires:
|
||||
- setup
|
||||
- test_aio_preview:
|
||||
requires:
|
||||
- aio_preview
|
||||
- publish_packages_as_artifacts:
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- publish_snapshot:
|
||||
# Note: no filters on this job because we want it to run for all upstream branches
|
||||
# We'd really like to filter out pull requests here, but not yet available:
|
||||
# https://discuss.circleci.com/t/workflows-pull-request-filter/14396/4
|
||||
# Instead, the job just exits immediately at the first step.
|
||||
requires:
|
||||
# Only publish if tests and integration tests pass
|
||||
- test
|
||||
- test_ivy_aot
|
||||
# Only publish if `aio`/`docs` tests using the locally built Angular packages pass
|
||||
- test_aio_local
|
||||
- test_aio_local_viewengine
|
||||
- test_docs_examples
|
||||
- test_docs_examples_viewengine
|
||||
# Get the artifacts to publish from the build-packages-dist job
|
||||
# since the publishing script expects the legacy outputs layout.
|
||||
- build-npm-packages
|
||||
- build-ivy-npm-packages
|
||||
- legacy-unit-tests-saucelabs
|
||||
- components-repo-unit-tests:
|
||||
requires:
|
||||
- build-npm-packages
|
||||
- test_zonejs:
|
||||
requires:
|
||||
- setup
|
||||
# Windows Jobs
|
||||
# These are very slow so we run them on non-PRs only for now.
|
||||
# TODO: remove the filter when CircleCI makes Windows FS faster.
|
||||
# The Windows jobs are only run after their non-windows counterparts finish successfully.
|
||||
# This isn't strictly necessary as there is no artifact dependency, but helps economize
|
||||
# CI resources by not attempting to build when we know should fail.
|
||||
- test_win:
|
||||
<<: *skip_on_pull_requests
|
||||
requires:
|
||||
- test
|
||||
- test_ivy_aot_win:
|
||||
<<: *skip_on_pull_requests
|
||||
requires:
|
||||
- test_ivy_aot
|
||||
|
||||
monitoring:
|
||||
jobs:
|
||||
- setup
|
||||
- aio_monitoring_stable:
|
||||
requires:
|
||||
- setup
|
||||
- aio_monitoring_next:
|
||||
requires:
|
||||
- setup
|
||||
- saucelabs_ivy:
|
||||
# Testing saucelabs via Bazel is currently taking longer than the legacy saucelabs job because
|
||||
# each karma_web_test target provisions and tears down its own browsers, which adds
|
||||
# a lot of overhead. Running once daily on master only to avoid wasting resources and
|
||||
# slowing down CI for PRs.
|
||||
# TODO: Run this job on all branches (including PRs) once karma_web_test targets can
|
||||
# share provisioned browsers and we can remove the legacy saucelabs job.
|
||||
requires:
|
||||
- setup
|
||||
- saucelabs_view_engine:
|
||||
# Testing saucelabs via Bazel is currently taking longer than the legacy saucelabs job because
|
||||
# each karma_web_test target provisions and tears down its own browsers, which adds
|
||||
# a lot of overhead. Running once daily on master only to avoid wasting resources and
|
||||
# slowing down CI for PRs.
|
||||
# TODO: Run this job on all branches (including PRs) once karma_web_test targets can
|
||||
# share provisioned browsers and we can remove the legacy saucelabs job.
|
||||
requires:
|
||||
- setup
|
||||
triggers:
|
||||
- schedule:
|
||||
<<: *only_on_master
|
||||
# Runs monitoring jobs at 10:00AM every day.
|
||||
cron: "0 10 * * *"
|
@ -1,73 +0,0 @@
|
||||
####################################################################################################
|
||||
# Helpers for defining environment variables for CircleCI.
|
||||
#
|
||||
# In CircleCI, each step runs in a new shell. The way to share ENV variables across steps is to
|
||||
# export them from `$BASH_ENV`, which is automatically sourced at the beginning of every step (for
|
||||
# the default `bash` shell).
|
||||
#
|
||||
# See also https://circleci.com/docs/2.0/env-vars/#using-bash_env-to-set-environment-variables.
|
||||
####################################################################################################
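# Illustrative usage sketch (not executed here; `MY_EXAMPLE_VAR` is a hypothetical name):
#
#   # In one CircleCI step:
#   setPublicVar MY_EXAMPLE_VAR "some value"   # appends `export MY_EXAMPLE_VAR="some value";` to `$BASH_ENV`
#
#   # In any later step (the default bash shell sources `$BASH_ENV` before the step runs):
#   echo "$MY_EXAMPLE_VAR"                     # prints "some value"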
|
||||
|
||||
# Set and print an environment variable.
|
||||
#
|
||||
# Use this function for setting environment variables that are public, i.e. it is OK for them to be
|
||||
# visible to anyone through the CI logs.
|
||||
#
|
||||
# Usage: `setPublicVar <name> <value>`
|
||||
function setPublicVar() {
|
||||
setSecretVar $1 "$2";
|
||||
echo "$1=$2";
|
||||
}
|
||||
|
||||
# Set (without printing) an environment variable.
|
||||
#
|
||||
# Use this function for setting environment variables that are secret, i.e. should not be visible to
|
||||
# everyone through the CI logs.
|
||||
#
|
||||
# Usage: `setSecretVar <name> <value>`
|
||||
function setSecretVar() {
|
||||
# WARNING: Secrets (e.g. passwords, access tokens) should NOT be printed.
|
||||
# (Keep original shell options to restore at the end.)
|
||||
local -r originalShellOptions=$(set +o);
|
||||
set +x -eu -o pipefail;
|
||||
|
||||
echo "export $1=\"${2:-}\";" >> $BASH_ENV;
|
||||
|
||||
# Restore original shell options.
|
||||
eval "$originalShellOptions";
|
||||
}
|
||||
|
||||
|
||||
# Create a function to set an environment variable, when called.
|
||||
#
|
||||
# Use this function to create setters for public environment variables that require expensive or
|
||||
# time-consuming computations and may not be needed. When needed, you can call the generated setter to set
|
||||
# the environment variable (which will be available through `$BASH_ENV` from that point onwards).
|
||||
#
|
||||
# Arguments:
|
||||
# - `<name>`: The name of the environment variable. The generated setter function will be
|
||||
# `setPublicVar_<name>`.
|
||||
# - `<code>`: The code to run to compute the value for the variable. Since this code should be
|
||||
# executed lazily, it must be properly escaped. For example:
|
||||
# ```sh
|
||||
# # DO NOT do this:
|
||||
# createPublicVarSetter MY_VAR "$(whoami)"; # `whoami` will be evaluated eagerly
|
||||
#
|
||||
# # DO this instead:
|
||||
# createPublicVarSetter MY_VAR "\$(whoami)"; # `whoami` will NOT be evaluated eagerly
|
||||
# ```
|
||||
#
|
||||
# Usage: `createPublicVarSetter <name> <code>`
|
||||
#
|
||||
# Example:
|
||||
# ```sh
|
||||
# createPublicVarSetter MY_VAR 'echo "FOO"';
|
||||
# echo $MY_VAR; # Not defined
|
||||
#
|
||||
# setPublicVar_MY_VAR;
|
||||
# source $BASH_ENV;
|
||||
# echo $MY_VAR; # FOO
|
||||
# ```
|
||||
function createPublicVarSetter() {
|
||||
echo "setPublicVar_$1() { setPublicVar $1 \"$2\"; }" >> $BASH_ENV;
|
||||
}
|
113
.circleci/env.sh
@ -1,113 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Variables
|
||||
readonly projectDir=$(realpath "$(dirname ${BASH_SOURCE[0]})/..")
|
||||
readonly envHelpersPath="$projectDir/.circleci/env-helpers.inc.sh";
|
||||
readonly bashEnvCachePath="$projectDir/.circleci/bash_env_cache";
|
||||
|
||||
# Load helpers and make them available everywhere (through `$BASH_ENV`).
|
||||
source $envHelpersPath;
|
||||
echo "source $envHelpersPath;" >> $BASH_ENV;
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define PUBLIC environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
# See https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables for more info.
|
||||
####################################################################################################
|
||||
setPublicVar CI "$CI"
|
||||
setPublicVar PROJECT_ROOT "$projectDir";
|
||||
setPublicVar CI_AIO_MIN_PWA_SCORE "95";
|
||||
# This is the branch being built; e.g. `pull/12345` for PR builds.
|
||||
setPublicVar CI_BRANCH "$CIRCLE_BRANCH";
|
||||
setPublicVar CI_BUILD_URL "$CIRCLE_BUILD_URL";
|
||||
setPublicVar CI_COMMIT "$CIRCLE_SHA1";
|
||||
# `CI_COMMIT_RANGE` is only used on push builds (a.k.a. non-PR, non-scheduled builds and rerun
|
||||
# workflows of such builds).
|
||||
setPublicVar CI_GIT_BASE_REVISION "${CIRCLE_GIT_BASE_REVISION}";
|
||||
setPublicVar CI_GIT_REVISION "${CIRCLE_GIT_REVISION}";
|
||||
setPublicVar CI_COMMIT_RANGE "$CIRCLE_GIT_BASE_REVISION..$CIRCLE_GIT_REVISION";
|
||||
setPublicVar CI_PULL_REQUEST "${CIRCLE_PR_NUMBER:-false}";
|
||||
setPublicVar CI_REPO_NAME "$CIRCLE_PROJECT_REPONAME";
|
||||
setPublicVar CI_REPO_OWNER "$CIRCLE_PROJECT_USERNAME";
|
||||
setPublicVar CI_PR_REPONAME "$CIRCLE_PR_REPONAME";
|
||||
setPublicVar CI_PR_USERNAME "$CIRCLE_PR_USERNAME";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define "lazy" PUBLIC environment variables for CircleCI.
|
||||
# (I.e. functions to set an environment variable when called.)
|
||||
####################################################################################################
|
||||
createPublicVarSetter CI_STABLE_BRANCH "\$(npm info @angular/core dist-tags.latest | sed -r 's/^\\s*([0-9]+\\.[0-9]+)\\.[0-9]+.*$/\\1.x/')";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define SECRET environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
setSecretVar CI_SECRET_AIO_DEPLOY_FIREBASE_TOKEN "$AIO_DEPLOY_TOKEN";
|
||||
setSecretVar CI_SECRET_PAYLOAD_FIREBASE_TOKEN "$ANGULAR_PAYLOAD_TOKEN";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define SauceLabs environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
setPublicVar SAUCE_USERNAME "angular-framework";
|
||||
setSecretVar SAUCE_ACCESS_KEY "0c731274ed5f-cbc9-16f4-021a-9835e39f";
|
||||
# TODO(josephperrott): Remove environment variables once all saucelabs tests are via bazel method.
|
||||
setPublicVar SAUCE_LOG_FILE /tmp/angular/sauce-connect.log
|
||||
setPublicVar SAUCE_READY_FILE /tmp/angular/sauce-connect-ready-file.lock
|
||||
setPublicVar SAUCE_PID_FILE /tmp/angular/sauce-connect-pid-file.lock
|
||||
setPublicVar SAUCE_TUNNEL_IDENTIFIER "angular-framework-${CIRCLE_BUILD_NUM}-${CIRCLE_NODE_INDEX}"
|
||||
# Amount of seconds we wait for sauceconnect to establish a tunnel instance. In order to not
|
||||
# acquire CircleCI instances for too long if sauceconnect failed, we need a connect timeout.
|
||||
setPublicVar SAUCE_READY_FILE_TIMEOUT 120
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define environment variables for the `angular/components` repo unit tests job.
|
||||
####################################################################################################
|
||||
# We specifically use a directory within "/tmp" here because we want the cloned repo to be
|
||||
# completely isolated from angular/angular in order to avoid any bad interactions between
|
||||
# their separate build setups. **NOTE**: When updating the temporary directory, also update
|
||||
# the `save_cache` path configuration in `config.yml`
|
||||
setPublicVar COMPONENTS_REPO_TMP_DIR "/tmp/angular-components-repo"
|
||||
setPublicVar COMPONENTS_REPO_URL "https://github.com/angular/components.git"
|
||||
setPublicVar COMPONENTS_REPO_BRANCH "master"
|
||||
# **NOTE**: When updating the commit SHA, also update the cache key in the CircleCI `config.yml`.
|
||||
setPublicVar COMPONENTS_REPO_COMMIT "448523bffffecd2b53a3d2854c3051b6b7a3934f"
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Decrypt GCP Credentials and store them as the Google default credentials.
|
||||
####################################################################################################
|
||||
mkdir -p "$HOME/.config/gcloud";
|
||||
openssl aes-256-cbc -d -in "${projectDir}/.circleci/gcp_token" \
|
||||
-md md5 -k "$CIRCLE_PROJECT_REPONAME" -out "$HOME/.config/gcloud/application_default_credentials.json"
|
||||
####################################################################################################
|
||||
# Set bazel configuration for CircleCI runs.
|
||||
####################################################################################################
|
||||
cp "${projectDir}/.circleci/bazel.linux.rc" "$HOME/.bazelrc";
|
||||
|
||||
####################################################################################################
|
||||
# Create shell script in /tmp for Bazel actions to access CI envs without
|
||||
# busting the cache. Used by payload-size.sh script in integration tests.
|
||||
####################################################################################################
|
||||
readonly bazelVarEnv="/tmp/bazel-ci-env.sh"
|
||||
echo "# Setup by /.circle/env.sh" > $bazelVarEnv
|
||||
echo "export PROJECT_ROOT=\"${PROJECT_ROOT}\";" >> $bazelVarEnv
|
||||
echo "export CI_BRANCH=\"${CI_BRANCH}\";" >> $bazelVarEnv
|
||||
echo "export CI_BUILD_URL=\"${CI_BUILD_URL}\";" >> $bazelVarEnv
|
||||
echo "export CI_COMMIT=\"${CI_COMMIT}\";" >> $bazelVarEnv
|
||||
echo "export CI_COMMIT_RANGE=\"${CI_COMMIT_RANGE}\";" >> $bazelVarEnv
|
||||
echo "export CI_PULL_REQUEST=\"${CI_PULL_REQUEST}\";" >> $bazelVarEnv
|
||||
echo "export CI_REPO_NAME=\"${CI_REPO_NAME}\";" >> $bazelVarEnv
|
||||
echo "export CI_REPO_OWNER=\"${CI_REPO_OWNER}\";" >> $bazelVarEnv
|
||||
echo "export CI_SECRET_PAYLOAD_FIREBASE_TOKEN=\"${CI_SECRET_PAYLOAD_FIREBASE_TOKEN}\";" >> $bazelVarEnv
|
||||
|
||||
####################################################################################################
|
||||
####################################################################################################
|
||||
## Source `$BASH_ENV` to make the variables available immediately. ##
|
||||
## ***NOTE: This must remain the last action in this script*** ##
|
||||
####################################################################################################
|
||||
####################################################################################################
|
||||
source $BASH_ENV;
|
Binary file not shown.
@ -1,11 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Install bazel remote cache proxy
|
||||
# This is temporary until the feature is no longer experimental on CircleCI.
|
||||
# See remote cache documentation in /docs/BAZEL.md
|
||||
|
||||
set -u -e
|
||||
|
||||
readonly DOWNLOAD_URL="https://5-116431813-gh.circle-artifacts.com/0/pkg/bazel-remote-proxy-$(uname -s)_$(uname -m)"
|
||||
|
||||
curl --fail -o ~/bazel-remote-proxy "$DOWNLOAD_URL"
|
||||
chmod +x ~/bazel-remote-proxy
|
@ -1,107 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Usage (cli):
|
||||
* ```
|
||||
* node trigger-webhook <build-number> <job-name> <webhook-url>
|
||||
* ```
|
||||
*
|
||||
* Usage (JS):
|
||||
* ```js
|
||||
* require('./trigger-webhook').
|
||||
* triggerWebhook(buildNumber, jobName, webhookUrl).
|
||||
* then(...);
|
||||
* ```
|
||||
*
|
||||
* Triggers a notification webhook with CircleCI specific info.
|
||||
*
|
||||
* It can be used for notifying external servers and trigger operations based on CircleCI job status
|
||||
* (e.g. triggering the creation of a preview based on previously stored build artifacts).
|
||||
*
|
||||
* The body of the sent payload is of the form:
|
||||
* ```json
|
||||
* {
|
||||
* "payload": {
|
||||
* "build_num": ${buildNumber}
|
||||
* "build_parameters": {
|
||||
* "CIRCLE_JOB": "${jobName}"
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* When used from JS, it returns a promise which resolves to an object of the form:
|
||||
* ```json
|
||||
* {
|
||||
* "statucCode": ${statusCode},
|
||||
* "responseText": "${responseText}"
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* NOTE:
|
||||
* - When used from the cli, the command will exit with an error code if the response's status code
|
||||
* is outside the [200, 400) range.
|
||||
* - When used from JS, the returned promise will be resolved, even if the response's status code is
|
||||
* outside the [200, 400) range. It is up to the caller to decide how this should be handled.
|
||||
*/
|
||||
|
||||
// Imports
|
||||
const {request} = require('https');
|
||||
|
||||
// Exports
|
||||
module.exports = {
|
||||
triggerWebhook,
|
||||
};
|
||||
|
||||
// Run
|
||||
if (require.main === module) {
|
||||
_main(process.argv.slice(2));
|
||||
}
|
||||
|
||||
// Helpers
|
||||
function _main(args) {
|
||||
triggerWebhook(...args).
|
||||
then(({statusCode, responseText}) => (200 <= statusCode && statusCode < 400) ?
|
||||
console.log(`Status: ${statusCode}\n${responseText}`) :
|
||||
Promise.reject(new Error(`Request failed (status: ${statusCode}): ${responseText}`))).
|
||||
catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
function postJson(url, data) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const opts = {method: 'post', headers: {'Content-Type': 'application/json'}};
|
||||
const onResponse = res => {
|
||||
const statusCode = res.statusCode || -1;
|
||||
let responseText = '';
|
||||
|
||||
res.
|
||||
on('error', reject).
|
||||
on('data', d => responseText += d).
|
||||
on('end', () => resolve({statusCode, responseText}));
|
||||
};
|
||||
|
||||
request(url, opts, onResponse).
|
||||
on('error', reject).
|
||||
end(JSON.stringify(data));
|
||||
});
|
||||
}
|
||||
|
||||
async function triggerWebhook(buildNumber, jobName, webhookUrl) {
|
||||
if (!buildNumber || !jobName || !webhookUrl || isNaN(buildNumber)) {
|
||||
throw new Error(
|
||||
'Missing or invalid arguments.\n' +
|
||||
'Expected: buildNumber (number), jobName (string), webhookUrl (string)');
|
||||
}
|
||||
|
||||
const data = {
|
||||
payload: {
|
||||
build_num: +buildNumber,
|
||||
build_parameters: {CIRCLE_JOB: jobName},
|
||||
},
|
||||
};
|
||||
|
||||
return postJson(webhookUrl, data);
|
||||
}
|
@ -1,56 +0,0 @@
|
||||
# Install Bazel pre-reqs on Windows
|
||||
# https://docs.bazel.build/versions/master/install-windows.html
|
||||
# https://docs.bazel.build/versions/master/windows.html
|
||||
# Install MSYS2 and packages
|
||||
choco install msys2 --version 20180531.0.0 --no-progress --package-parameters "/NoUpdate"
|
||||
C:\tools\msys64\usr\bin\bash.exe -l -c "pacman --needed --noconfirm -S zip unzip patch diffutils git"
|
||||
|
||||
# Add PATH modifications to the Powershell profile. This is the win equivalent of .bash_profile.
|
||||
# https://docs.microsoft.com/en-us/previous-versions//bb613488(v=vs.85)
|
||||
new-item -path $profile -itemtype file -force
|
||||
# Paths for nodejs, npm, yarn, and msys2. Use single quotes to prevent interpolation.
|
||||
# Add before the original path to use msys2 instead of the installed gitbash.
|
||||
Add-Content $profile '$Env:path = "${Env:ProgramFiles}\nodejs\;C:\Users\circleci\AppData\Roaming\npm\;${Env:ProgramFiles(x86)}\Yarn\bin\;C:\Users\circleci\AppData\Local\Yarn\bin\;C:\tools\msys64\usr\bin\;" + $Env:path'
|
||||
# Environment variables for Bazel
|
||||
Add-Content $profile '$Env:BAZEL_SH = "C:\tools\msys64\usr\bin\bash.exe"'
|
||||
|
||||
# Get the bazelisk version devdep and store it in a global var for use in the circleci job.
|
||||
$bazeliskVersion = & ${Env:ProgramFiles}\nodejs\node.exe -e "console.log(require('./package.json').devDependencies['@bazel/bazelisk'])"
|
||||
# This is a tricky situation: we want $bazeliskVersion to be evaluated but not $Env:BAZELISK_VERSION.
|
||||
# Using the string format operator works: https://stackoverflow.com/questions/32127583/expand-variable-inside-single-quotes
|
||||
$bazeliskVersionGlobalVar = '$Env:BAZELISK_VERSION = "{0}"' -f $bazeliskVersion
|
||||
Add-Content $profile $bazeliskVersionGlobalVar
|
||||
|
||||
# Remove the CircleCI checkout SSH override, because it breaks cloning repositories through Bazel.
|
||||
# See https://circleci.com/gh/angular/angular/401454 for an example.
|
||||
# TODO: is this really needed? Maybe there's a better way. It doesn't happen on Linux or on Codefresh.
|
||||
git config --global --unset url.ssh://git@github.com.insteadOf
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Decrypt GCP Credentials and store them as the Google default credentials.
|
||||
####################################################################################################
|
||||
mkdir ${env:APPDATA}\gcloud
|
||||
openssl aes-256-cbc -d -in .circleci\gcp_token -md md5 -out "$env:APPDATA\gcloud\application_default_credentials.json" -k "$env:CIRCLE_PROJECT_REPONAME"
|
||||
|
||||
####################################################################################################
|
||||
# Set bazel configuration for CircleCI runs.
|
||||
####################################################################################################
|
||||
copy .circleci\bazel.windows.rc ${Env:USERPROFILE}\.bazelrc
|
||||
|
||||
####################################################################################################
|
||||
# Install specific version of node.
|
||||
####################################################################################################
|
||||
choco install nodejs --version 12.14.1 --no-progress
|
||||
|
||||
# These Bazel prereqs aren't needed because the CircleCI image already includes them.
|
||||
# choco install yarn --version 1.16.0 --no-progress
|
||||
# choco install vcredist2015 --version 14.0.24215.20170201
|
||||
|
||||
# We don't need VS Build Tools for the tested bazel targets.
|
||||
# If it's needed again, uncomment these lines.
|
||||
# VS Build Tools are needed for Bazel C++ targets (like com_google_protobuf)
|
||||
# choco install visualstudio2019buildtools --version 16.1.2.0 --no-progress --package-parameters "--add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --add Microsoft.Component.VC.Runtime.UCRTSDK --add Microsoft.VisualStudio.Component.Windows10SDK.17763"
|
||||
# Add-Content $profile '$Env:BAZEL_VC = "${Env:ProgramFiles(x86)}\Microsoft Visual Studio\2019\BuildTools\VC\"'
|
||||
# Python is needed for Bazel Python targets
|
||||
# choco install python --version 3.5.1 --no-progress
|
@ -1,3 +0,0 @@
|
||||
Language: JavaScript
|
||||
BasedOnStyle: Google
|
||||
ColumnLimit: 100
|
@ -1,31 +0,0 @@
|
||||
# VSCode Remote Development - Developing inside a Container
|
||||
|
||||
This folder contains configuration files that can be used to opt into working on this repository in a [Docker container](https://www.docker.com/resources/what-container) via [VSCode](https://code.visualstudio.com/)'s Remote Development feature (see below).
|
||||
|
||||
Info on remote development and developing inside a container with VSCode:
|
||||
- [VSCode: Remote Development](https://code.visualstudio.com/docs/remote/remote-overview)
|
||||
- [VSCode: Developing inside a Container](https://code.visualstudio.com/docs/remote/containers)
|
||||
- [VSCode: Remote Development FAQ](https://code.visualstudio.com/docs/remote/faq)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
_Prerequisite: [Install Docker](https://docs.docker.com/install) on your local environment._
|
||||
|
||||
To get started, read and follow the instructions in [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The [.devcontainer/](.) directory contains pre-configured `devcontainer.json` and `Dockerfile` files, which you can use to set up remote development with a Docker container.
|
||||
|
||||
In a nutshell, you need to:
|
||||
- Install the [Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension.
|
||||
- Copy [recommended-Dockerfile](./recommended-Dockerfile) to `Dockerfile` (and optionally tweak to suit your needs).
|
||||
- Copy [recommended-devcontainer.json](./recommended-devcontainer.json) to `devcontainer.json` (and optionally tweak to suit your needs); see the shell sketch after this list.
|
||||
- Open VSCode and bring up the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).
|
||||
- Type `Remote-Containers: Open Folder in Container` and choose your local clone of [angular/angular](https://github.com/angular/angular).
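For reference, the two "copy" steps above amount to something like the following minimal sketch (run from the root of your local clone; adjust the paths if your setup differs):

```sh
# Create your personal, git-ignored copies of the recommended config files.
cp .devcontainer/recommended-Dockerfile .devcontainer/Dockerfile
cp .devcontainer/recommended-devcontainer.json .devcontainer/devcontainer.json
```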
|
||||
|
||||
The `.devcontainer/devcontainer.json` and `.devcontainer/Dockerfile` files are ignored by git, so you can have your own local versions. We may occasionally update the template files ([recommended-devcontainer.json](./recommended-devcontainer.json), [recommended-Dockerfile](./recommended-Dockerfile)), in which case you will need to manually update your local copies (if desired).
|
||||
|
||||
|
||||
## Updating `recommended-devcontainer.json` and `recommended-Dockerfile`
|
||||
|
||||
You can update and commit the recommended config files (which people use as a basis for their local configs) if you find that something is broken, out-of-date, or can be improved.
|
||||
|
||||
Please, keep in mind that any changes you make will potentially be used by many people on different environments. Try to keep these config files cross-platform compatible and free of personal preferences.
|
@ -1,24 +0,0 @@
|
||||
# Image metadata and config.
|
||||
FROM circleci/node:10-browsers # Ideally, the image version should be what we use on CI.
|
||||
# See `executors > browsers-executor` in `.circleci/config.yml`.
|
||||
|
||||
LABEL name="Angular dev environment" \
|
||||
description="This image can be used to create a dev environment for building Angular." \
|
||||
vendor="angular" \
|
||||
version="1.0"
|
||||
|
||||
EXPOSE 4000 4200 4433 5000 8080 9876
|
||||
|
||||
|
||||
# Switch to `root` (CircleCI images use `circleci` as the user).
|
||||
USER root
|
||||
|
||||
|
||||
# Configure `Node.js`/`npm` and install utilities.
|
||||
RUN npm config --global set user root
|
||||
RUN npm install --global yarn@latest # Ideally, the version should be what we use on CI.
|
||||
# See `commands > overwrite_yarn` in `.circleci/config.yml`.
|
||||
|
||||
|
||||
# Go! (And keep going.)
|
||||
CMD ["tail", "--follow", "/dev/null"]
|
@ -1,16 +0,0 @@
|
||||
// Reference: https://code.visualstudio.com/docs/remote/containers#_devcontainerjson-reference
|
||||
{
|
||||
"name": "Angular dev container",
|
||||
"dockerFile": "Dockerfile",
|
||||
"appPort": [4000, 4200, 4433, 5000, 8080, 9876],
|
||||
"postCreateCommand": "yarn install",
|
||||
"extensions": [
|
||||
"devondcarew.bazel-code",
|
||||
"gkalpak.aio-docs-utils",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
||||
"xaver.clang-format",
|
||||
// The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
|
||||
//"angular.ng-template",
|
||||
//"dbaeumer.vscode-eslint",
|
||||
],
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
# https://editorconfig.org
|
||||
# http://editorconfig.org
|
||||
|
||||
root = true
|
||||
|
||||
|
9
.gitattributes
vendored
@ -1,12 +1,5 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text=auto
|
||||
|
||||
# JS and TS files must always use LF for tools to work
|
||||
# JS files must always use LF for tools to work
|
||||
*.js eol=lf
|
||||
*.ts eol=lf
|
||||
|
||||
# API guardian patch must always use LF for tests to work
|
||||
*.patch eol=lf
|
||||
|
||||
# Must keep Windows line ending to be parsed correctly
|
||||
scripts/windows/packages.txt eol=crlf
|
||||
|
10
.github/ISSUE_TEMPLATE.md
vendored
@ -1,10 +0,0 @@
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please help us process issues more efficiently by filing an
|
||||
issue using one of the following templates:
|
||||
|
||||
https://github.com/angular/angular/issues/new/choose
|
||||
|
||||
Thank you!
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
69
.github/ISSUE_TEMPLATE/1-bug-report.md
vendored
@ -1,69 +0,0 @@
|
||||
---
|
||||
name: "\U0001F41EBug report"
|
||||
about: Report a bug in the Angular Framework
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
|
||||
# 🐞 bug report
|
||||
|
||||
### Affected Package
|
||||
<!-- Can you pin-point one or more @angular/* packages as the source of the bug? -->
|
||||
<!-- ✍️edit: --> The issue is caused by package @angular/....
|
||||
|
||||
|
||||
### Is this a regression?
|
||||
|
||||
<!-- Did this behavior use to work in the previous version? -->
|
||||
<!-- ✍️--> Yes, the previous version in which this bug was not present was: ....
|
||||
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️--> A clear and concise description of the problem...
|
||||
|
||||
|
||||
## 🔬 Minimal Reproduction
|
||||
<!--
|
||||
Please create and share a minimal reproduction of the issue starting with this template: https://stackblitz.com/fork/angular-issue-repro2
|
||||
-->
|
||||
<!-- ✍️--> https://stackblitz.com/...
|
||||
|
||||
<!--
|
||||
If StackBlitz is not suitable for reproduction of your issue, please create a minimal GitHub repository with the reproduction of the issue.
|
||||
A good way to make a minimal reproduction is to create a new app via `ng new repro-app` and add the minimum possible code to show the problem.
|
||||
Share the link to the repo below along with step-by-step instructions to reproduce the problem, as well as expected and actual behavior.
|
||||
|
||||
Issues that don't have enough info and can't be reproduced will be closed.
|
||||
|
||||
You can read more about issue submission guidelines here: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#-submitting-an-issue
|
||||
-->
|
||||
|
||||
## 🔥 Exception or Error
|
||||
<pre><code>
|
||||
<!-- If the issue is accompanied by an exception or an error, please share it below: -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
## 🌍 Your Environment
|
||||
|
||||
**Angular Version:**
|
||||
<pre><code>
|
||||
<!-- run `ng version` and paste output below -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
**Anything else relevant?**
|
||||
<!-- ✍️Is this a browser specific issue? If so, please specify the browser and version. -->
|
||||
|
||||
<!-- ✍️Do any of these matter: operating system, IDE, package manager, HTTP server, ...? If so, please mention it below. -->
|
32
.github/ISSUE_TEMPLATE/2-feature-request.md
vendored
@ -1,32 +0,0 @@
|
||||
---
|
||||
name: "\U0001F680Feature request"
|
||||
about: Suggest a feature for Angular Framework
|
||||
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
|
||||
# 🚀 feature request
|
||||
|
||||
### Relevant Package
|
||||
<!-- Can you pin-point one or more @angular/* packages the are relevant for this feature request? -->
|
||||
<!-- ✍️edit: --> This feature request is for @angular/....
|
||||
|
||||
|
||||
### Description
|
||||
<!-- ✍️--> A clear and concise description of the problem or missing capability...
|
||||
|
||||
|
||||
### Describe the solution you'd like
|
||||
<!-- ✍️--> If you have a solution in mind, please describe it.
|
||||
|
||||
|
||||
### Describe alternatives you've considered
|
||||
<!-- ✍️--> Have you considered any alternative solutions or workarounds?
|
55
.github/ISSUE_TEMPLATE/3-docs-bug.md
vendored
@ -1,55 +0,0 @@
|
||||
---
|
||||
name: "📚 Docs or angular.io issue report"
|
||||
about: Report an issue in Angular's documentation or angular.io application
|
||||
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
# 📚 Docs or angular.io bug report
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️edit:--> A clear and concise description of the problem...
|
||||
|
||||
|
||||
## 🔬 Minimal Reproduction
|
||||
|
||||
### What's the affected URL?
|
||||
<!-- ✍️edit:--> https://angular.io/...
|
||||
|
||||
### Reproduction Steps
|
||||
<!-- If applicable please list the steps to take to reproduce the issue -->
|
||||
<!-- ✍️edit:-->
|
||||
|
||||
### Expected vs Actual Behavior
|
||||
<!-- If applicable please describe the difference between the expected and actual behavior after following the repro steps. -->
|
||||
<!-- ✍️edit:-->
|
||||
|
||||
|
||||
## 📷Screenshot
|
||||
<!-- Often a screenshot can help to capture the issue better than a long description. -->
|
||||
<!-- ✍️upload a screenshot:-->
|
||||
|
||||
|
||||
## 🔥 Exception or Error
|
||||
<pre><code>
|
||||
<!-- If the issue is accompanied by an exception or an error, please share it below: -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
## 🌍 Your Environment
|
||||
|
||||
### Browser info
|
||||
<!-- ✍️Is this a browser specific issue? If so, please specify the device, browser, and version. -->
|
||||
|
||||
### Anything else relevant?
|
||||
<!-- ✍️Please provide additional info if necessary. -->
|
@ -1,11 +0,0 @@
|
||||
---
|
||||
name: ⚠️ Security issue disclosure
|
||||
about: Report a security issue in Angular Framework, Material, or CLI
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please read https://angular.io/guide/security#report-issues on how to disclose security related issues.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
16
.github/ISSUE_TEMPLATE/5-support-request.md
vendored
@ -1,16 +0,0 @@
|
||||
---
|
||||
name: "❓Support request"
|
||||
about: Questions and requests for support
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please do not file questions or support requests on the GitHub issues tracker.
|
||||
|
||||
You can get your questions answered using other communication channels. Please see:
|
||||
https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question
|
||||
|
||||
Thank you!
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
13
.github/ISSUE_TEMPLATE/6-angular-cli.md
vendored
@ -1,13 +0,0 @@
|
||||
---
|
||||
name: "\U0001F6E0️Angular CLI"
|
||||
about: Issues and feature requests for Angular CLI
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please file any Angular CLI issues at: https://github.com/angular/angular-cli/issues/new
|
||||
|
||||
For the time being, we keep Angular CLI issues in a separate repository.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
13
.github/ISSUE_TEMPLATE/7-angular-components.md
vendored
@ -1,13 +0,0 @@
|
||||
---
|
||||
name: "\U0001F48EAngular Components"
|
||||
about: Issues and feature requests for Angular Components
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please file any Angular Components issues at: https://github.com/angular/components/issues/new
|
||||
|
||||
For the time being, we keep Angular Components issues in a separate repository.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
43
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -1,43 +0,0 @@
|
||||
## PR Checklist
|
||||
Please check if your PR fulfills the following requirements:
|
||||
|
||||
- [ ] The commit message follows our guidelines: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit
|
||||
- [ ] Tests for the changes have been added (for bug fixes / features)
|
||||
- [ ] Docs have been added / updated (for bug fixes / features)
|
||||
|
||||
|
||||
## PR Type
|
||||
What kind of change does this PR introduce?
|
||||
|
||||
<!-- Please check the one that applies to this PR using "x". -->
|
||||
|
||||
- [ ] Bugfix
|
||||
- [ ] Feature
|
||||
- [ ] Code style update (formatting, local variables)
|
||||
- [ ] Refactoring (no functional changes, no api changes)
|
||||
- [ ] Build related changes
|
||||
- [ ] CI related changes
|
||||
- [ ] Documentation content changes
|
||||
- [ ] angular.io application / infrastructure changes
|
||||
- [ ] Other... Please describe:
|
||||
|
||||
|
||||
## What is the current behavior?
|
||||
<!-- Please describe the current behavior that you are modifying, or link to a relevant issue. -->
|
||||
|
||||
Issue Number: N/A
|
||||
|
||||
|
||||
## What is the new behavior?
|
||||
|
||||
|
||||
## Does this PR introduce a breaking change?
|
||||
|
||||
- [ ] Yes
|
||||
- [ ] No
|
||||
|
||||
|
||||
<!-- If this PR contains a breaking change, please describe the impact and migration path for existing applications below. -->
|
||||
|
||||
|
||||
## Other information
|
183
.github/angular-robot.yml
vendored
@ -1,183 +0,0 @@
|
||||
# Configuration for angular-robot
|
||||
|
||||
#options for the size plugin
|
||||
size:
|
||||
disabled: false
|
||||
maxSizeIncrease: 2000
|
||||
circleCiStatusName: "ci/circleci: test_ivy_aot"
|
||||
|
||||
# options for the merge plugin
|
||||
merge:
|
||||
# the status will be added to your pull requests
|
||||
status:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the name of the status
|
||||
context: "ci/angular: merge status"
|
||||
# text to show when all checks pass
|
||||
successText: "All checks passed!"
|
||||
# text to show when some checks are failing
|
||||
failureText: "The following checks are failing:"
|
||||
|
||||
# the g3 status will be added to your pull requests if they include files that match the patterns
|
||||
g3Status:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the name of the status
|
||||
context: "google3"
|
||||
# text to show when the status is pending, {{PRNumber}} will be replaced by the PR number
|
||||
pendingDesc: "Googler: run g3sync presubmit {{PRNumber}}"
|
||||
# text to show when the status is success
|
||||
successDesc: "Does not affect google3"
|
||||
# link to use for the details
|
||||
url: "http://go/angular/g3sync"
|
||||
# list of patterns to check for the files changed by the PR
|
||||
# this list must be manually kept in sync with google3/third_party/javascript/angular2/copy.bara.sky
|
||||
include:
|
||||
- "LICENSE"
|
||||
- "modules/benchmarks/**"
|
||||
- "modules/system.d.ts"
|
||||
- "packages/**"
|
||||
# list of patterns to ignore for the files changed by the PR
|
||||
exclude:
|
||||
- "packages/*"
|
||||
- "packages/bazel/*"
|
||||
- "packages/bazel/src/api-extractor/**"
|
||||
- "packages/bazel/src/builders/**"
|
||||
- "packages/bazel/src/ng_package/**"
|
||||
- "packages/bazel/src/protractor/**"
|
||||
- "packages/bazel/src/schematics/**"
|
||||
- "packages/compiler-cli/ngcc/**"
|
||||
- "packages/docs/**"
|
||||
- "packages/elements/schematics/**"
|
||||
- "packages/examples/**"
|
||||
- "packages/language-service/**"
|
||||
- "packages/localize/**"
|
||||
- "packages/private/**"
|
||||
- "packages/service-worker/**"
|
||||
- "**/.gitignore"
|
||||
- "**/.gitkeep"
|
||||
- "**/yarn.lock"
|
||||
- "**/package.json"
|
||||
- "**/third_party/**"
|
||||
- "**/tsconfig-build.json"
|
||||
- "**/tsconfig.json"
|
||||
- "**/rollup.config.js"
|
||||
- "**/BUILD.bazel"
|
||||
- "**/*.md"
|
||||
- "packages/**/integrationtest/**"
|
||||
- "packages/**/test/**"
|
||||
- "packages/zone.js/*"
|
||||
- "packages/zone.js/doc/**"
|
||||
- "packages/zone.js/example/**"
|
||||
- "packages/zone.js/scripts/**"
|
||||
|
||||
# comment that will be added to a PR when there is a conflict, leave empty or set to false to disable
|
||||
mergeConflictComment: "Hi @{{PRAuthor}}! This PR has merge conflicts due to recent upstream merges.
|
||||
\nPlease help to unblock it by resolving these conflicts. Thanks!"
|
||||
|
||||
# label to monitor
|
||||
mergeLabel: "PR action: merge"
|
||||
|
||||
# adding any of these labels will also add the merge label
|
||||
mergeLinkedLabels:
|
||||
- "PR action: merge-assistance"
|
||||
|
||||
# list of checks that will determine if the merge label can be added
|
||||
checks:
|
||||
|
||||
# require that the PR has reviews from all requested reviewers
|
||||
#
|
||||
# This enables us to request reviews from both eng and tech writers, or multiple eng folks, and prevents accidental merges.
|
||||
# Rather than merging PRs with pending reviews, if all approvals are obtained and additional reviews are not needed, any pending reviewers should be removed via GitHub UI (this also leaves an audit trail behind these decisions).
|
||||
requireReviews: true,
|
||||
|
||||
# whether the PR shouldn't have a conflict with the base branch
|
||||
noConflict: true
|
||||
# list of labels that a PR needs to have, checked with a regexp (e.g. "PR target:" will work for the label "PR target: master")
|
||||
requiredLabels:
|
||||
- "PR target: *"
|
||||
- "cla: yes"
|
||||
|
||||
# list of labels that a PR shouldn't have, checked after the required labels with a regexp
|
||||
forbiddenLabels:
|
||||
- "PR target: TBD"
|
||||
- "PR action: cleanup"
|
||||
- "PR action: review"
|
||||
- "PR state: blocked"
|
||||
- "cla: no"
|
||||
|
||||
# list of PR statuses that need to be successful
|
||||
requiredStatuses:
|
||||
- "ci/circleci: build"
|
||||
- "ci/circleci: lint"
|
||||
- "ci/circleci: publish_snapshot"
|
||||
- "ci/angular: size"
|
||||
- "cla/google"
|
||||
- "google3"
|
||||
- "pullapprove"
|
||||
|
||||
|
||||
# the comment that will be added when the merge label is added despite failing checks, leave empty or set to false to disable
|
||||
# {{MERGE_LABEL}} will be replaced by the value of the mergeLabel option
|
||||
# {{PLACEHOLDER}} will be replaced by the list of failing checks
|
||||
mergeRemovedComment: "I see that you just added the `{{MERGE_LABEL}}` label, but the following checks are still failing:
|
||||
\n{{PLACEHOLDER}}
|
||||
\n
|
||||
\n**If you want your PR to be merged, it has to pass all the CI checks.**
|
||||
\n
|
||||
\nIf you can't get the PR to a green state due to flakes or broken master, please try rebasing to master and/or restarting the CI job. If that fails and you believe that the issue is not due to your change, please contact the caretaker and ask for help."
|
||||
|
||||
# options for the triage plugin
|
||||
triage:
|
||||
# number of the milestone to apply when the issue has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the issue is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if an issue has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if an issue has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: bug/fix"
|
||||
- "severity*"
|
||||
- "freq*"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: feature"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: refactor"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: RFC / Discussion / question"
|
||||
- "comp: *"
|
||||
|
||||
# options for the triage PR plugin
|
||||
triagePR:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# number of the milestone to apply when the PR has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the PR is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if a PR has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if a PR has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: *"
|
||||
- "effort*"
|
||||
- "risk*"
|
||||
- "comp: *"
|
||||
|
||||
# options for rerunning CI
|
||||
rerunCircleCI:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the label which when added triggers a rerun of the default CircleCI workflow
|
||||
triggerRerunLabel: "PR action: rerun CI at HEAD"
|
15
.github/workflows/lock-closed.yml
vendored
@ -1,15 +0,0 @@
|
||||
name: Lock closed inactive issues
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run at 16:00 every day
|
||||
- cron: '0 16 * * *'
|
||||
|
||||
jobs:
|
||||
lock_closed:
|
||||
if: github.repository == 'angular/angular'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: angular/dev-infra/github-actions/lock-closed@66462f6
|
||||
with:
|
||||
lock-bot-key: ${{ secrets.LOCK_BOT_PRIVATE_KEY }}
|
46
.gitignore
vendored
@ -1,44 +1,26 @@
|
||||
# Don’t commit the following directories created by pub.
|
||||
/dist/
|
||||
packages/
|
||||
.buildlog
|
||||
node_modules
|
||||
bower_components
|
||||
.pub
|
||||
.DS_STORE
|
||||
|
||||
/dist/
|
||||
/bazel-out
|
||||
/integration/bazel/bazel-*
|
||||
*.log
|
||||
node_modules
|
||||
# Or the files created by dart2js.
|
||||
*.dart.js
|
||||
*.dart.precompiled.js
|
||||
*.js_
|
||||
*.js.deps
|
||||
*.js.map
|
||||
|
||||
# Include when developing application packages.
|
||||
pubspec.lock
|
||||
.c9
|
||||
.idea/
|
||||
.devcontainer/*
|
||||
!.devcontainer/README.md
|
||||
!.devcontainer/recommended-devcontainer.json
|
||||
!.devcontainer/recommended-Dockerfile
|
||||
.settings/
|
||||
.vscode/launch.json
|
||||
.vscode/settings.json
|
||||
.vscode/tasks.json
|
||||
*.swo
|
||||
modules/.settings
|
||||
modules/.vscode
|
||||
.vimrc
|
||||
.nvimrc
|
||||
|
||||
# Don't check in secret files
|
||||
*secret.js
|
||||
|
||||
# Ignore npm/yarn debug log
|
||||
npm-debug.log
|
||||
yarn-error.log
|
||||
|
||||
# build-analytics
|
||||
.build-analytics
|
||||
|
||||
# rollup-test output
|
||||
/modules/rollup-test/dist/
|
||||
|
||||
# User specific bazel settings
|
||||
.bazelrc.user
|
||||
|
||||
.notes.md
|
||||
baseline.json
|
||||
/docs/bower_components/
|
||||
|
@ -1,79 +0,0 @@
|
||||
// The configuration for `ng-dev commit-message` commands.
|
||||
const commitMessage = {
|
||||
'maxLength': 120,
|
||||
'minBodyLength': 100,
|
||||
'types': [
|
||||
'build',
|
||||
'ci',
|
||||
'docs',
|
||||
'feat',
|
||||
'fix',
|
||||
'perf',
|
||||
'refactor',
|
||||
'release',
|
||||
'style',
|
||||
'test',
|
||||
],
|
||||
'scopes': [
|
||||
'animations',
|
||||
'bazel',
|
||||
'benchpress',
|
||||
'changelog',
|
||||
'common',
|
||||
'compiler',
|
||||
'compiler-cli',
|
||||
'core',
|
||||
'dev-infra',
|
||||
'docs-infra',
|
||||
'elements',
|
||||
'forms',
|
||||
'http',
|
||||
'language-service',
|
||||
'localize',
|
||||
'ngcc',
|
||||
'packaging',
|
||||
'platform-browser',
|
||||
'platform-browser-dynamic',
|
||||
'platform-server',
|
||||
'platform-webworker',
|
||||
'platform-webworker-dynamic',
|
||||
'router',
|
||||
'service-worker',
|
||||
'upgrade',
|
||||
've',
|
||||
'zone.js',
|
||||
]
|
||||
};
|
||||
|
||||
// The configuration for `ng-dev format` commands.
|
||||
const format = {
|
||||
'clang-format': {
|
||||
'matchers': [
|
||||
'dev-infra/**/*.{js,ts}',
|
||||
'packages/**/*.{js,ts}',
|
||||
'!packages/zone.js',
|
||||
'!packages/common/locales/**/*.{js,ts}',
|
||||
'!packages/common/src/i18n/available_locales.ts',
|
||||
'!packages/common/src/i18n/currencies.ts',
|
||||
'!packages/common/src/i18n/locale_en.ts',
|
||||
'modules/benchmarks/**/*.{js,ts}',
|
||||
'modules/playground/**/*.{js,ts}',
|
||||
'tools/**/*.{js,ts}',
|
||||
'!tools/gulp-tasks/cldr/extract.js',
|
||||
'!tools/public_api_guard/**/*.d.ts',
|
||||
'!tools/ts-api-guardian/test/fixtures/**',
|
||||
'*.{js,ts}',
|
||||
'!**/node_modules/**',
|
||||
'!**/dist/**',
|
||||
'!**/built/**',
|
||||
'!shims_for_IE.js',
|
||||
]
|
||||
},
|
||||
'buildifier': true
|
||||
};
|
||||
|
||||
// Export function to build ng-dev configuration object.
|
||||
module.exports = {
|
||||
commitMessage,
|
||||
format,
|
||||
};
|
1097
.pullapprove.yml
File diff suppressed because it is too large
38
.travis.yml
Normal file
@ -0,0 +1,38 @@
|
||||
language: node_js
|
||||
sudo: false
|
||||
node_js:
|
||||
- '0.10'
|
||||
env:
|
||||
global:
|
||||
- KARMA_BROWSERS=DartiumWithWebPlatform
|
||||
- E2E_BROWSERS=Dartium
|
||||
- LOGS_DIR=/tmp/angular-build/logs
|
||||
- ARCH=linux-x64
|
||||
matrix:
|
||||
- MODE=js DART_CHANNEL=dev
|
||||
# Disabled until Dart v1.9 hits stable
|
||||
# - MODE=dart DART_CHANNEL=stable
|
||||
- MODE=dart DART_CHANNEL=dev
|
||||
|
||||
before_install:
|
||||
- export DISPLAY=:99.0
|
||||
- export GIT_SHA=$(git rev-parse HEAD)
|
||||
- ./scripts/ci/init_android.sh
|
||||
- ./scripts/ci/install_dart.sh ${DART_CHANNEL} ${ARCH}
|
||||
- sh -e /etc/init.d/xvfb start
|
||||
- if [[ -e SKIP_TRAVIS_TESTS ]]; then { cat SKIP_TRAVIS_TESTS ; exit 0; } fi
|
||||
before_script:
|
||||
- mkdir -p $LOGS_DIR
|
||||
script:
|
||||
- ./scripts/ci/build_and_test.sh ${MODE}
|
||||
after_script:
|
||||
- ./scripts/ci/print-logs.sh
|
||||
notifications:
|
||||
webhooks:
|
||||
urls:
|
||||
- https://webhooks.gitter.im/e/1ef62e23078036f9cee4
|
||||
on_success: change # options: [always|never|change] default: always
|
||||
on_failure: always # options: [always|never|change] default: always
|
||||
on_start: false # default: false
|
||||
slack:
|
||||
secure: EP4MzZ8JMyNQJ4S3cd5LEPWSMjC7ZRdzt3veelDiOeorJ6GwZfCDHncR+4BahDzQAuqyE/yNpZqaLbwRWloDi15qIUsm09vgl/1IyNky1Sqc6lEknhzIXpWSalo4/T9ZP8w870EoDvM/UO+LCV99R3wS8Nm9o99eLoWVb2HIUu0=
|
25
.vscode/README.md
vendored
@ -1,25 +0,0 @@
|
||||
# VSCode Configuration
|
||||
|
||||
This folder contains opt-in [Workspace Settings](https://code.visualstudio.com/docs/getstarted/settings), [Tasks](https://code.visualstudio.com/docs/editor/tasks), [Launch Configurations](https://code.visualstudio.com/Docs/editor/debugging#_launch-configurations) and [Extension Recommendations](https://code.visualstudio.com/docs/editor/extension-gallery#_workspace-recommended-extensions) that the Angular team recommends using when working on this repository.
|
||||
|
||||
## Usage
|
||||
|
||||
To use the recommended configurations follow the steps below:
|
||||
|
||||
- install the recommended extensions in `.vscode/extensions.json`
|
||||
- copy (or link) `.vscode/recommended-settings.json` to `.vscode/settings.json` (see the shell sketch after this list)
|
||||
- copy (or link) `.vscode/recommended-launch.json` to `.vscode/launch.json`
|
||||
- copy (or link) `.vscode/recommended-tasks.json` to `.vscode/tasks.json`
|
||||
- restart the editor
|
||||
|
||||
If you already have your custom workspace settings you should instead manually merge the file contents.
|
||||
|
||||
This isn't an automatic process so you will need to repeat it when settings are updated.
|
||||
|
||||
To see the recommended extensions select "Extensions: Show Recommended Extensions" in the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).
|
||||
|
||||
## Editing `.vscode/recommended-*.json` files
|
||||
|
||||
If you wish to add extra configuration items please keep in mind any modifications you make here will be used by many users.
|
||||
|
||||
Try to keep these settings/configurations to things that help facilitate the development process and avoid altering the user workflow whenever possible.
|
15
.vscode/extensions.json
vendored
@ -1,15 +0,0 @@
|
||||
{
|
||||
// See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
|
||||
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
|
||||
|
||||
// List of extensions which should be recommended for users of this workspace.
|
||||
"recommendations": [
|
||||
"devondcarew.bazel-code",
|
||||
"gkalpak.aio-docs-utils",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
||||
"xaver.clang-format",
|
||||
// The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
|
||||
//"angular.ng-template",
|
||||
//"dbaeumer.vscode-eslint",
|
||||
],
|
||||
}
|
85
.vscode/recommended-launch.json
vendored
@ -1,85 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": true,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug (no source maps)",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": false,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/acceptance",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/render3",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
]
|
||||
}
|
31
.vscode/recommended-settings.json
vendored
@ -1,31 +0,0 @@
|
||||
{
|
||||
// Format js and ts files on save with `clang-format.executable`
|
||||
// If `clang-format.executable` is not being used, these two settings should be removed; otherwise they will break existing formatting.
|
||||
// You can instead run `yarn gulp format` to manually format your code.
|
||||
"[javascript]": {
|
||||
"editor.formatOnSave": true,
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.formatOnSave": true,
|
||||
},
|
||||
// Please install https://marketplace.visualstudio.com/items?itemName=xaver.clang-format to take advantage of `clang-format` in VSCode.
|
||||
// (See https://clang.llvm.org/docs/ClangFormat.html for more info `clang-format`.)
|
||||
"clang-format.executable": "${workspaceRoot}/node_modules/.bin/clang-format",
|
||||
// Exclude third party modules and build artifacts from the editor watchers/searches.
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/**": true,
|
||||
"**/bazel-out/**": true,
|
||||
"**/dist/**": true,
|
||||
"**/aio/src/generated/**": true,
|
||||
},
|
||||
"search.exclude": {
|
||||
"**/node_modules": true,
|
||||
"**/bower_components": true,
|
||||
"**/bazel-out": true,
|
||||
"**/dist": true,
|
||||
"**/aio/src/generated": true,
|
||||
},
|
||||
"git.ignoreLimitWarning": true,
|
||||
}
|
113
.vscode/recommended-tasks.json
vendored
@ -1,113 +0,0 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "IVY:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/render3",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
@ -1,13 +0,0 @@
# Yarn Vendoring

We utilize Yarn's `yarn-path` configuration in a shared `.yarnrc` file to enforce
everyone using the same version of Yarn. Yarn checks the `.yarnrc` file to
determine if it should delegate the command to the vendored version at the
provided path.

## How to update

To update our vendored copy to the latest version of Yarn:

- Run this command:

  ```sh
  yarn policies set-version latest
  ```

- Remove the previous version.
147391 .yarn/releases/yarn-1.22.4.js (vendored)
File diff suppressed because one or more lines are too long
5 .yarnrc
@ -1,5 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


yarn-path ".yarn/releases/yarn-1.22.4.js"
50 BUILD.bazel
@ -1,50 +0,0 @@
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
exports_files([
|
||||
"LICENSE",
|
||||
"protractor-perf.conf.js",
|
||||
"karma-js.conf.js",
|
||||
"browser-providers.conf.js",
|
||||
"scripts/ci/track-payload-size.sh",
|
||||
"scripts/ci/payload-size.sh",
|
||||
"scripts/ci/payload-size.js",
|
||||
"package.json",
|
||||
])
|
||||
|
||||
alias(
|
||||
name = "tsconfig.json",
|
||||
actual = "//packages:tsconfig-build.json",
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "web_test_bootstrap_scripts",
|
||||
# do not sort
|
||||
srcs = [
|
||||
"@npm//:node_modules/core-js/client/core.js",
|
||||
"//packages/zone.js/dist:zone.js",
|
||||
"//packages/zone.js/dist:zone-testing.js",
|
||||
"//packages/zone.js/dist:task-tracking.js",
|
||||
"//:test-events.js",
|
||||
"//:shims_for_IE.js",
|
||||
# Including systemjs because it defines `__eval`, which produces correct stack traces.
|
||||
"@npm//:node_modules/systemjs/dist/system.src.js",
|
||||
"@npm//:node_modules/reflect-metadata/Reflect.js",
|
||||
],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "angularjs_scripts",
|
||||
srcs = [
|
||||
# We also declare the unminified AngularJS files since these can be used for
|
||||
# local debugging (e.g. see: packages/upgrade/test/common/test_helpers.ts)
|
||||
"@npm//:node_modules/angular/angular.js",
|
||||
"@npm//:node_modules/angular/angular.min.js",
|
||||
"@npm//:node_modules/angular-1.5/angular.js",
|
||||
"@npm//:node_modules/angular-1.5/angular.min.js",
|
||||
"@npm//:node_modules/angular-1.6/angular.js",
|
||||
"@npm//:node_modules/angular-1.6/angular.min.js",
|
||||
"@npm//:node_modules/angular-mocks/angular-mocks.js",
|
||||
"@npm//:node_modules/angular-mocks-1.5/angular-mocks.js",
|
||||
"@npm//:node_modules/angular-mocks-1.6/angular-mocks.js",
|
||||
],
|
||||
)
|
6060 CHANGELOG.md
File diff suppressed because it is too large
@ -1,12 +0,0 @@
|
||||
# Contributor Code of Conduct
|
||||
## Version 0.3b-angular
|
||||
|
||||
As contributors and maintainers of the Angular project, we pledge to respect everyone who contributes by posting issues, updating documentation, submitting pull requests, providing feedback in comments, and any other activities.
|
||||
|
||||
Communication through any of Angular's channels (GitHub, Gitter, IRC, mailing lists, Google+, Twitter, etc.) must be constructive and never resort to personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.
|
||||
|
||||
We promise to extend courtesy and respect to everyone involved in this project regardless of gender, gender identity, sexual orientation, disability, age, race, ethnicity, religion, or level of experience. We expect anyone contributing to the Angular project to do the same.
|
||||
|
||||
If any member of the community violates this code of conduct, the maintainers of the Angular project may take action, removing issues, comments, and PRs or blocking accounts as deemed appropriate.
|
||||
|
||||
If you are subject to or witness unacceptable behavior, or have any other concerns, please email us at [conduct@angular.io](mailto:conduct@angular.io).
|
205 CONTRIBUTING.md
@ -1,6 +1,6 @@
|
||||
# Contributing to Angular
|
||||
# Contributing to Angular 2
|
||||
|
||||
We would love for you to contribute to Angular and help make it even better than it is
|
||||
We would love for you to contribute to Angular 2 and help make it even better than it is
|
||||
today! As a contributor, here are the guidelines we would like you to follow:
|
||||
|
||||
- [Code of Conduct](#coc)
|
||||
@ -17,90 +17,93 @@ Help us keep Angular open and inclusive. Please read and follow our [Code of Con
|
||||
|
||||
## <a name="question"></a> Got a Question or Problem?
|
||||
|
||||
Do not open issues for general support questions as we want to keep GitHub issues for bug reports and feature requests. You've got much better chances of getting your question answered on [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) where the questions should be tagged with tag `angular`.
|
||||
If you have questions about how to *use* Angular, please direct them to the [Google Group][angular-group]
|
||||
discussion list or [StackOverflow][stackoverflow]. We are also available on [Gitter][gitter].
|
||||
|
||||
Stack Overflow is a much better place to ask questions since:
|
||||
|
||||
- there are thousands of people willing to help on Stack Overflow
|
||||
- questions and answers stay available for public viewing so your question / answer might help someone else
|
||||
- Stack Overflow's voting system assures that the best answers are prominently visible.
|
||||
|
||||
To save your and our time, we will systematically close all issues that are requests for general support and redirect people to Stack Overflow.
|
||||
|
||||
If you would like to chat about the question in real-time, you can reach out via [our gitter channel][gitter].
|
||||
|
||||
## <a name="issue"></a> Found a Bug?
|
||||
If you find a bug in the source code, you can help us by
|
||||
## <a name="issue"></a> Found an Issue?
|
||||
If you find a bug in the source code or a mistake in the documentation, you can help us by
|
||||
[submitting an issue](#submit-issue) to our [GitHub Repository][github]. Even better, you can
|
||||
[submit a Pull Request](#submit-pr) with a fix.
|
||||
|
||||
## <a name="feature"></a> Missing a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our GitHub
|
||||
Repository. If you would like to *implement* a new feature, please submit an issue with
|
||||
a proposal for your work first, to be sure that we can use it.
|
||||
Please consider what kind of change it is:
|
||||
## <a name="feature"></a> Want a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our [GitHub
|
||||
Repository][github]. If you would like to *implement* a new feature then consider what kind of
|
||||
change it is:
|
||||
|
||||
* For a **Major Feature**, first open an issue and outline your proposal so that it can be
|
||||
discussed. This will also allow us to better coordinate our efforts, prevent duplication of work,
|
||||
and help you to craft the change so that it is successfully accepted into the project.
|
||||
* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
|
||||
|
||||
## <a name="docs"></a> Want a Doc Fix?
|
||||
If you want to help improve the docs, then consider what kind of improvement it is:
|
||||
|
||||
* For **Major Changes**, it's a good idea to let others know what you're working on to
|
||||
minimize duplication of effort. Before starting, check out the issue queue for
|
||||
issues labeled [#docs](https://github.com/angular/angular/labels/%23docs).
|
||||
Comment on an issue to let others know what you're working on, or [create a new issue](#submit-issue)
|
||||
if your work doesn't fit within the scope of any of the existing doc issues.
|
||||
Please build and test the documentation before [submitting the Pull Request](#submit-pr), to be sure
|
||||
you haven't accidentally introduced any layout or formatting issues. Also ensure that your commit
|
||||
message is labeled "docs" and follows the [Commit Message Guidelines](#commit) given below.
|
||||
* For **Small Changes**, there is no need to file an issue first. Simply [submit a Pull Request](#submit-pr).
|
||||
|
||||
## <a name="submit"></a> Submission Guidelines
|
||||
|
||||
### <a name="submit-issue"></a> Submitting an Issue
|
||||
Before you submit an issue, search the archive, maybe your question was already answered.
|
||||
|
||||
Before you submit an issue, please search the issue tracker, maybe an issue for your problem already exists and the discussion might inform you of workarounds readily available.
|
||||
|
||||
We want to fix all the issues as soon as possible, but before fixing a bug we need to reproduce and confirm it. In order to reproduce bugs, we will systematically ask you to provide a minimal reproduction. Having a minimal reproducible scenario gives us a wealth of important information without going back & forth to you with additional questions.
|
||||
|
||||
A minimal reproduction allows us to quickly confirm a bug (or point out a coding problem) as well as confirm that we are fixing the right problem.
|
||||
|
||||
We will be insisting on a minimal reproduction scenario in order to save maintainers time and ultimately be able to fix more bugs. Interestingly, from our experience, users often find coding problems themselves while preparing a minimal reproduction. We understand that sometimes it might be hard to extract essential bits of code from a larger codebase but we really need to isolate the problem before we can fix it.
|
||||
|
||||
Unfortunately, we are not able to investigate / fix bugs without a minimal reproduction, so if we don't hear back from you, we are going to close an issue that doesn't have enough info to be reproduced.
|
||||
|
||||
You can file new issues by selecting from our [new issue templates](https://github.com/angular/angular/issues/new/choose) and filling out the issue template.
|
||||
If your issue appears to be a bug, and hasn't been reported, open a new issue.
|
||||
Help us to maximize the effort we can spend fixing issues and adding new
|
||||
features, by not reporting duplicate issues. Providing the following information will increase the
|
||||
chances of your issue being dealt with quickly:
|
||||
|
||||
* **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
|
||||
* **Motivation for or Use Case** - explain why this is a bug for you
|
||||
* **Angular Version(s)** - is it a regression?
|
||||
* **Browsers and Operating System** - is this a problem with all browsers?
|
||||
* **Reproduce the Error** - provide a live example (using [Plunker][plunker],
|
||||
[JSFiddle][jsfiddle] or [Runnable][runnable]) or an unambiguous set of steps.
|
||||
* **Related Issues** - has a similar issue been reported before?
|
||||
* **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
|
||||
causing the problem (line of code or commit)
|
||||
|
||||
### <a name="submit-pr"></a> Submitting a Pull Request (PR)
|
||||
Before you submit your Pull Request (PR) consider the following guidelines:
|
||||
|
||||
1. Search [GitHub](https://github.com/angular/angular/pulls) for an open or closed PR
|
||||
* Search [GitHub](https://github.com/angular/angular.dart/pulls) for an open or closed PR
|
||||
that relates to your submission. You don't want to duplicate effort.
|
||||
1. Be sure that an issue describes the problem you're fixing, or documents the design for the feature you'd like to add.
|
||||
Discussing the design up front helps to ensure that we're ready to accept your work.
|
||||
1. Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without this. Make sure you sign with the primary email address of the Git identity that has been granted access to the Angular repository.
|
||||
1. Fork the angular/angular repo.
|
||||
1. Make your changes in a new git branch:
|
||||
* Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without this.
|
||||
* Make your changes in a new git branch:
|
||||
|
||||
```shell
|
||||
git checkout -b my-fix-branch master
|
||||
```
|
||||
|
||||
1. Create your patch, **including appropriate test cases**.
|
||||
1. Follow our [Coding Rules](#rules).
|
||||
1. Run the full Angular test suite, as described in the [developer documentation][dev-doc],
|
||||
* Create your patch, **including appropriate test cases**.
|
||||
* Follow our [Coding Rules](#rules).
|
||||
* Run the full Angular test suite, as described in the [developer documentation][dev-doc],
|
||||
and ensure that all tests pass.
|
||||
1. Commit your changes using a descriptive commit message that follows our
|
||||
* Commit your changes using a descriptive commit message that follows our
|
||||
[commit message conventions](#commit). Adherence to these conventions
|
||||
is necessary because release notes are automatically generated from these messages.
|
||||
|
||||
```shell
|
||||
git commit -a
|
||||
```
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
|
||||
1. Push your branch to GitHub:
|
||||
* Push your branch to GitHub:
|
||||
|
||||
```shell
|
||||
git push origin my-fix-branch
|
||||
```
|
||||
|
||||
1. In GitHub, send a pull request to `angular:master`.
|
||||
* In GitHub, send a pull request to `angular:master`.
|
||||
* If we suggest changes then:
|
||||
* Make the required updates.
|
||||
* Re-run the Angular test suites to ensure tests are still passing.
|
||||
* Re-run the Angular 2 test suites for JS and Dart to ensure tests are still passing.
|
||||
* Rebase your branch and force push to your GitHub repository (this will update your Pull Request):
|
||||
|
||||
```shell
|
||||
@ -144,9 +147,9 @@ To ensure consistency throughout the source code, keep these rules in mind as yo
|
||||
|
||||
* All features or bug fixes **must be tested** by one or more specs (unit-tests).
|
||||
* All public API methods **must be documented**. (Details TBC).
|
||||
* We follow [Google's JavaScript Style Guide][js-style-guide], but wrap all code at
|
||||
**100 characters**. An automated formatter is available, see
|
||||
[DEVELOPER.md](docs/DEVELOPER.md#clang-format).
|
||||
* With the exceptions listed below, we follow the rules contained in
|
||||
[Google's JavaScript Style Guide][js-style-guide]:
|
||||
* Wrap all code at **100 characters**.
|
||||
|
||||
## <a name="commit"></a> Commit Message Guidelines
|
||||
|
||||
@ -166,87 +169,32 @@ format that includes a **type**, a **scope** and a **subject**:
|
||||
<footer>
|
||||
```
|
||||
|
||||
The **header** is mandatory and the **scope** of the header is optional.
|
||||
|
||||
Any line of the commit message cannot be longer than 100 characters! This allows the message to be easier
|
||||
Any line of the commit message cannot be longer 100 characters! This allows the message to be easier
|
||||
to read on GitHub as well as in various git tools.
|
||||
|
||||
The footer should contain a [closing reference to an issue](https://help.github.com/articles/closing-issues-via-commit-messages/) if any.
|
||||
|
||||
Samples: (even more [samples](https://github.com/angular/angular/commits/master))
|
||||
|
||||
```
|
||||
docs(changelog): update changelog to beta.5
|
||||
```
|
||||
```
|
||||
fix(release): need to depend on latest rxjs and zone.js
|
||||
|
||||
The version in our package.json gets copied to the one we publish, and users need the latest of these.
|
||||
```
|
||||
|
||||
### Revert
|
||||
If the commit reverts a previous commit, it should begin with `revert: `, followed by the header of the reverted commit. In the body it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit being reverted.
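For instance, a revert of the `docs(changelog)` sample shown above could look like this (the hash is purely illustrative):

```
revert: docs(changelog): update changelog to beta.5

This reverts commit 1234567890abcdef1234567890abcdef12345678.
```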
|
||||
|
||||
### Type
|
||||
Must be one of the following:
|
||||
|
||||
* **build**: Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)
|
||||
* **ci**: Changes to our CI configuration files and scripts (example scopes: Circle, BrowserStack, SauceLabs)
|
||||
* **docs**: Documentation only changes
|
||||
* **feat**: A new feature
|
||||
* **fix**: A bug fix
|
||||
* **docs**: Documentation only changes
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing
|
||||
semi-colons, etc)
|
||||
* **refactor**: A code change that neither fixes a bug or adds a feature
|
||||
* **perf**: A code change that improves performance
|
||||
* **refactor**: A code change that neither fixes a bug nor adds a feature
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
* **test**: Adding missing tests or correcting existing tests
|
||||
* **test**: Adding missing tests
|
||||
* **chore**: Changes to the build process or auxiliary tools and libraries such as documentation
|
||||
generation
|
||||
|
||||
### Scope
|
||||
The scope should be the name of the npm package affected (as perceived by the person reading the changelog generated from commit messages).
|
||||
|
||||
The following is the list of supported scopes:
|
||||
|
||||
* **animations**
|
||||
* **bazel**
|
||||
* **benchpress**
|
||||
* **common**
|
||||
* **compiler**
|
||||
* **compiler-cli**
|
||||
* **core**
|
||||
* **elements**
|
||||
* **forms**
|
||||
* **http**
|
||||
* **language-service**
|
||||
* **localize**
|
||||
* **platform-browser**
|
||||
* **platform-browser-dynamic**
|
||||
* **platform-server**
|
||||
* **platform-webworker**
|
||||
* **platform-webworker-dynamic**
|
||||
* **router**
|
||||
* **service-worker**
|
||||
* **upgrade**
|
||||
* **zone.js**
|
||||
|
||||
There are currently a few exceptions to the "use package name" rule:
|
||||
|
||||
* **packaging**: used for changes that change the npm package layout in all of our packages, e.g.
|
||||
public path changes, package.json changes done to all packages, d.ts file/format changes, changes
|
||||
to bundles, etc.
|
||||
* **changelog**: used for updating the release notes in CHANGELOG.md
|
||||
* **docs-infra**: used for docs-app (angular.io) related changes within the /aio directory of the
|
||||
repo
|
||||
* **dev-infra**: used for dev-infra related changes within the directories /scripts, /tools and /dev-infra
|
||||
* **ngcc**: used for changes to the [Angular Compatibility Compiler](./packages/compiler-cli/ngcc/README.md)
|
||||
* **ve**: used for changes specific to ViewEngine (legacy compiler/renderer).
|
||||
* none/empty string: useful for `style`, `test` and `refactor` changes that are done across all
|
||||
packages (e.g. `style: add missing semicolons`) and for docs changes that are not related to a
|
||||
specific package (e.g. `docs: fix typo in tutorial`).
|
||||
The scope could be anything specifying place of the commit change. For example
|
||||
`Compiler`, `ElementInjector`, etc.
|
||||
|
||||
### Subject
|
||||
The subject contains a succinct description of the change:
|
||||
The subject contains succinct description of the change:
|
||||
|
||||
* use the imperative, present tense: "change" not "changed" nor "changes"
|
||||
* don't capitalize the first letter
|
||||
* don't capitalize first letter
|
||||
* no dot (.) at the end
|
||||
|
||||
### Body
|
||||
@ -257,7 +205,6 @@ The body should include the motivation for the change and contrast this with pre
|
||||
The footer should contain any information about **Breaking Changes** and is also the place to
|
||||
reference GitHub issues that this commit **Closes**.
|
||||
|
||||
**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this.
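As an illustrative sketch (all values below are placeholders, not taken from this repository), a footer combining a breaking change notice with an issue reference could be laid out as:

```
BREAKING CHANGE: <summary of what breaks and how to migrate>

Closes #<issue number>
```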
|
||||
|
||||
A detailed explanation can be found in this [document][commit-message-format].
|
||||
|
||||
@ -266,33 +213,21 @@ A detailed explanation can be found in this [document][commit-message-format].
|
||||
Please sign our Contributor License Agreement (CLA) before sending pull requests. For any code
|
||||
changes to be accepted, the CLA must be signed. It's a quick process, we promise!
|
||||
|
||||
* For individuals, we have a [simple click-through form][individual-cla].
|
||||
* For corporations, we'll need you to
|
||||
* For individuals we have a [simple click-through form][individual-cla].
|
||||
* For corporations we'll need you to
|
||||
[print, sign and one of scan+email, fax or mail the form][corporate-cla].
|
||||
|
||||
<hr>
|
||||
|
||||
If you have more than one Git identity, you must make sure that you sign the CLA using the primary email address associated with the ID that has been granted access to the Angular repository. Git identities can be associated with more than one email address, and only one is primary. Here are some links to help you sort out multiple Git identities and email addresses:
|
||||
|
||||
* https://help.github.com/articles/setting-your-commit-email-address-in-git/
|
||||
* https://stackoverflow.com/questions/37245303/what-does-usera-committed-with-userb-13-days-ago-on-github-mean
|
||||
* https://help.github.com/articles/about-commit-email-addresses/
|
||||
* https://help.github.com/articles/blocking-command-line-pushes-that-expose-your-personal-email-address/
|
||||
|
||||
Note that if you have more than one Git identity, it is important to verify that you are logged in with the same ID with which you signed the CLA, before you commit changes. If not, your PR will fail the CLA check.
|
||||
|
||||
<hr>
|
||||
|
||||
[angular-group]: https://groups.google.com/forum/#!forum/angular
|
||||
[coc]: https://github.com/angular/code-of-conduct/blob/master/CODE_OF_CONDUCT.md
|
||||
[commit-message-format]: https://docs.google.com/document/d/1QrDFcIiPjSLDn3EL15IJygNPiHORgU1_OOAqWjiDU5Y/edit#
|
||||
[corporate-cla]: http://code.google.com/legal/corporate-cla-v1.0.html
|
||||
[dev-doc]: https://github.com/angular/angular/blob/master/docs/DEVELOPER.md
|
||||
[dev-doc]: https://github.com/angular/angular/blob/master/DEVELOPER.md
|
||||
[github]: https://github.com/angular/angular
|
||||
[gitter]: https://gitter.im/angular/angular
|
||||
[individual-cla]: http://code.google.com/legal/individual-cla-v1.0.html
|
||||
[js-style-guide]: https://google.github.io/styleguide/jsguide.html
|
||||
[jsfiddle]: http://jsfiddle.net
|
||||
[js-style-guide]: http://google-styleguide.googlecode.com/svn/trunk/javascriptguide.xml
|
||||
[jsfiddle]: http://jsfiddle.net/
|
||||
[plunker]: http://plnkr.co/edit
|
||||
[runnable]: http://runnable.com
|
||||
[runnable]: http://runnable.com/
|
||||
[stackoverflow]: http://stackoverflow.com/questions/tagged/angular
|
||||
|
235 DEVELOPER.md (normal file)
@ -0,0 +1,235 @@
|
||||
# Building and Testing Angular 2 for JS and Dart
|
||||
|
||||
This document describes how to set up your development environment to build and test Angular, both
|
||||
JS and Dart versions. It also explains the basic mechanics of using `git`, `node`, and `npm`.
|
||||
|
||||
See the [contributing guidelines](https://github.com/angular/angular/blob/master/CONTRIBUTING.md)
|
||||
for how to contribute your own code to the project.
|
||||
|
||||
1. [Prerequisite Software](#prerequisite-software)
|
||||
2. [Getting the Sources](#getting-the-sources)
|
||||
3. [Environment Variable Setup](#environment-variable-setup)
|
||||
4. [Installing NPM Modules and Dart Packages](#installing-npm-modules-and-dart-packages)
|
||||
5. [Running Tests Locally](#running-tests-locally)
|
||||
6. [Project Information](#project-information)
|
||||
7. [CI using Travis](#ci-using-travis)
|
||||
8. [Debugging](#debugging)
|
||||
|
||||
## Prerequisite Software
|
||||
|
||||
Before you can build and test Angular, you must install and configure the
|
||||
following products on your development machine:
|
||||
|
||||
* [Dart](https://www.dartlang.org) (version `>=1.9.0-dev.8.0`), specifically the Dart-SDK and
|
||||
Dartium (a version of [Chromium](http://www.chromium.org) with native support for Dart through
|
||||
the Dart VM). One of the **simplest** ways to get both is to install the **Dart Editor bundle**,
|
||||
which includes the editor, SDK and Dartium. See the [Dart tools](https://www.dartlang.org/tools)
|
||||
download [page for instructions](https://www.dartlang.org/tools/download.html); note that you can
|
||||
download both **stable** and **dev** channel versions from the [download
|
||||
archive](https://www.dartlang.org/tools/download-archive).
|
||||
|
||||
* [Git](http://git-scm.com) and/or the **Github app** (for [Mac](http://mac.github.com) or
|
||||
[Windows](http://windows.github.com)): the [Github Guide to Installing
|
||||
Git](https://help.github.com/articles/set-up-git) is a good source of information.
|
||||
|
||||
* [Node.js](http://nodejs.org) which is used to run a development web server, run tests, and
|
||||
generate distributable files. We also use Node's Package Manager (`npm`). Depending on your
|
||||
system, you can install Node either from source or as a pre-packaged bundle.
|
||||
|
||||
* [Chrome Canary](https://www.google.com/chrome/browser/canary.html), a version of Chrome with
|
||||
bleeding edge functionality, built especially for developers (and early adopters).
|
||||
|
||||
|
||||
## Getting the Sources
|
||||
|
||||
Forking and cloning the Angular repository:
|
||||
|
||||
1. Login to your Github account or create one by following the instructions given
|
||||
[here](https://github.com/signup/free).
|
||||
2. [Fork](http://help.github.com/forking) the [main Angular
|
||||
repository](https://github.com/angular/angular).
|
||||
3. Clone your fork of the Angular repository and define an `upstream` remote pointing back to
|
||||
the Angular repository that you forked in the first place:
|
||||
|
||||
```shell
|
||||
# Clone your Github repository:
|
||||
git clone git@github.com:<github username>/angular.git
|
||||
|
||||
# Go to the Angular directory:
|
||||
cd angular
|
||||
|
||||
# Add the main Angular repository as an upstream remote to your repository:
|
||||
git remote add upstream https://github.com/angular/angular.git
|
||||
```
|
||||
|
||||
## Environment Variable Setup
|
||||
|
||||
Define the environment variables listed below. These are mainly needed for the testing. The
|
||||
notation shown here is for [`bash`](http://www.gnu.org/software/bash); adapt as appropriate for
|
||||
your favorite shell.
|
||||
|
||||
Examples given below of possible values for initializing the environment variables assume **Mac OS
|
||||
X** and that you have installed the Dart Editor in the directory named by
|
||||
`DART_EDITOR_DIR=/Applications/dart`. This is only for illustrative purposes.
|
||||
|
||||
```shell
|
||||
# DARTIUM_BIN: path to a Dartium browser executable; used by Karma to run Dart tests
|
||||
export DARTIUM_BIN="$DART_EDITOR_DIR/chromium/Chromium.app/Contents/MacOS/Chromium"
|
||||
```
|
||||
|
||||
Add the Dart SDK `bin` directory to your path and/or define `DART_SDK` (this is also detailed
|
||||
[here](https://www.dartlang.org/tools/pub/installing.html)):
|
||||
|
||||
```shell
|
||||
# DART_SDK: path to a Dart SDK directory
|
||||
export DART_SDK="$DART_EDITOR_DIR/dart-sdk"
|
||||
|
||||
# Update PATH to include the Dart SDK bin directory
|
||||
PATH+=":$DART_SDK/bin"
|
||||
```
|
||||
|
||||
## Installing NPM Modules and Dart Packages
|
||||
|
||||
Next, install the modules and packages needed to build Angular and run tests:
|
||||
|
||||
```shell
|
||||
# Install Angular project dependencies (package.json)
|
||||
npm install
|
||||
|
||||
# Ensure protractor has the latest webdriver
|
||||
$(npm bin)/webdriver-manager update
|
||||
|
||||
# Install Dart packages
|
||||
pub get
|
||||
```
|
||||
|
||||
**Optional**: In this document, we make use of project local `npm` package scripts and binaries
|
||||
(stored under `./node_modules/.bin`) by prefixing these command invocations with `$(npm bin)`; in
|
||||
particular `gulp` and `protractor` commands. If you prefer, you can drop this path prefix by
|
||||
globally installing these two packages as follows:
|
||||
|
||||
* `npm install -g gulp` (you might need to prefix this command with `sudo`)
|
||||
* `npm install -g protractor` (you might need to prefix this command with `sudo`)
|
||||
|
||||
Since global installs can become stale, we avoid their use in these instructions.
|
||||
|
||||
## Build commands
|
||||
|
||||
To build Angular and prepare tests, run:
|
||||
|
||||
```shell
|
||||
$(npm bin)/gulp build
|
||||
```
|
||||
|
||||
Notes:
|
||||
* Results are put in the `dist` folder.
|
||||
* This will also run `pub get` for the subfolders in `modules` and run `dartanalyzer` for
|
||||
every file that matches `<module>/src/<module>.dart`, e.g. `di/src/di.dart`
|
||||
|
||||
To clean out the `dist` folder use:
|
||||
```shell
|
||||
$(npm bin)/gulp clean
|
||||
```
|
||||
|
||||
## Running Tests Locally
|
||||
|
||||
### Basic tests
|
||||
|
||||
1. `$(npm bin)/gulp test.unit.js`: JS tests in a browser; runs in **watch mode** (i.e. karma
|
||||
watches the test files for changes and re-runs tests when files are updated).
|
||||
2. `$(npm bin)/gulp test.unit.cjs`: JS tests in NodeJS; runs in **watch mode**
|
||||
3. `$(npm bin)/gulp test.unit.dart`: Dart tests in Dartium; runs in **watch mode**.
|
||||
|
||||
If you prefer running tests in "single-run" mode rather than watch mode use
|
||||
|
||||
* `$(npm bin)/gulp test.unit.js/ci`
|
||||
* `$(npm bin)/gulp test.unit.dart/ci`
|
||||
|
||||
**Note**: If you want to only run a single test you can alter the test you wish
|
||||
to run by changing `it` to `iit` or `describe` to `ddescribe`. This will only
|
||||
run that individual test and make it much easier to debug. `xit` and `xdescribe`
|
||||
can also be useful to exclude a test and a group of tests respectively.
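As a rough sketch (assuming the Jasmine-style globals provided by the karma setup; the suite and expectations below are made up for illustration):

```js
// Only the focused suite/spec runs; everything else is skipped.
ddescribe('calculator', function() {
  iit('adds two numbers', function() {
    expect(1 + 2).toEqual(3);
  });

  // `xit` excludes a single spec; `xdescribe` would exclude a whole suite.
  xit('subtracts two numbers', function() {
    expect(3 - 2).toEqual(1);
  });
});
```

Remember to change these back to `describe`/`it` before sending a pull request.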
|
||||
|
||||
**Note** for transpiler tests: the karma preprocessor is set up so that the transpiler is reloaded after every test run. This makes it possible to change the preprocessor and re-run the tests without exiting karma (just touch a test file that you would like to run).
|
||||
|
||||
### E2e tests
|
||||
|
||||
1. `$(npm bin)/gulp build.js.cjs` (builds benchpress and tests into `dist/js/cjs` folder).
|
||||
2. `$(npm bin)/gulp serve.js.prod serve.js.dart2js` (runs local webserver).
|
||||
3. `$(npm bin)/protractor protractor-js.conf.js`: JS e2e tests.
|
||||
4. `$(npm bin)/protractor protractor-dart2js.conf.js`: Dart2JS e2e tests.
|
||||
|
||||
Angular specific command line options when running protractor:
|
||||
- `$(npm bin)/protractor protractor-{js|dart2js}-conf.js --ng-help`
|
||||
|
||||
### Performance tests
|
||||
|
||||
1. `$(npm bin)/gulp build.js.cjs` (builds benchpress and tests into `dist/js/cjs` folder)
|
||||
2. `$(npm bin)/gulp serve.js.prod serve.js.dart2js` (runs local webserver)
|
||||
3. `$(npm bin)/protractor protractor-js.conf.js --benchmark`: JS performance tests
|
||||
4. `$(npm bin)/protractor protractor-dart2js.conf.js --benchmark`: Dart2JS performance tests
|
||||
|
||||
Angular specific command line options when running protractor (e.g. force gc, ...):
|
||||
`$(npm bin)/protractor protractor-{js|dart2js}-conf.js --ng-help`
|
||||
|
||||
## Project Information
|
||||
|
||||
### Folder structure
|
||||
|
||||
* `modules/*`: modules that will be loaded in the browser
|
||||
* `tools/*`: tools that are needed to build Angular
|
||||
* `dist/*`: build files are placed here.
|
||||
|
||||
### File suffixes
|
||||
|
||||
* `*.js`: javascript files that get transpiled to Dart and EcmaScript 5
|
||||
* `*.es6`: javascript files that get transpiled only to EcmaScript 5
|
||||
* `*.es5`: javascript files that don't get transpiled
|
||||
* `*.dart`: dart files that don't get transpiled
|
||||
|
||||
## CI using Travis
|
||||
|
||||
For instructions on setting up Continuous Integration using Travis, see the instructions given
|
||||
[here](https://github.com/angular/angular.dart/blob/master/travis.md).
|
||||
|
||||
## Debugging
|
||||
|
||||
### Debug the transpiler
|
||||
|
||||
If you need to debug the transpiler:
|
||||
|
||||
- add a `debugger;` statement in the transpiler code,
|
||||
- from the root folder, execute `node debug $(npm bin)/gulp build` to enter the node
|
||||
debugger
|
||||
- press "c" to execute the program until you reach the `debugger;` statement,
|
||||
- you can then type "repl" to enter the REPL and inspect variables in the context.
|
||||
|
||||
See the [Node.js manual](http://nodejs.org/api/debugger.html) for more information.
|
||||
|
||||
Notes:
|
||||
- You can also execute `node $(npm bin)/karma start karma-dart.conf.js` depending on which code you want to debug (the former will process the "modules" folder while the latter processes the transpiler specs).
|
||||
- You can also add `debugger;` statements in the specs (JavaScript). The execution will halt when
|
||||
the developer tools are opened in the browser running Karma.
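A minimal sketch of that workflow (the spec body and the `computeValue` helper are hypothetical):

```js
it('helps inspect intermediate state', function() {
  var result = computeValue(42); // hypothetical function under test
  debugger;                      // execution pauses here once the browser dev tools are open
  expect(result).not.toBeUndefined();
});
```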
|
||||
|
||||
### Debug the tests
|
||||
|
||||
If you need to debug the tests:
|
||||
|
||||
- add a `debugger;` statement to the test you want to debug (or the source code),
|
||||
- execute karma `$(npm bin)/gulp test.js`,
|
||||
- press the top right "DEBUG" button,
|
||||
- open the dev tools and press F5,
|
||||
- the execution halts at the `debugger;` statement
|
||||
|
||||
**Note (WebStorm users)**:
|
||||
You can create a Karma run config from WebStorm.
|
||||
Then in the "Run" menu, press "Debug 'karma-js.conf.js'", WebStorm will stop in the generated code
|
||||
on the `debugger;` statement.
|
||||
You can then step into the code and add watches.
|
||||
The `debugger;` statement is needed because WebStorm will stop in a transpiled file. Breakpoints in
|
||||
the original source files are not supported at the moment.
|
||||
|
215 LICENSE
@ -1,21 +1,202 @@
|
||||
The MIT License
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Copyright (c) 2010-2020 Google LLC. http://angular.io/license
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
1. Definitions.
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
62 README.md
@ -1,26 +1,52 @@
|
||||
[](https://circleci.com/gh/angular/workflows/angular/tree/master)
|
||||
[](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://www.npmjs.com/@angular/core)
|
||||
Angular [](https://travis-ci.org/angular/angular) [](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
=========
|
||||
|
||||
Angular is a development platform for building mobile and desktop web applications. This is the
|
||||
repository for [Angular 2][ng2], both the JavaScript (JS) and [Dart][dart] versions.
|
||||
|
||||
Angular 2 is currently in **Alpha Preview**. We recommend using Angular 1.X for production
|
||||
applications:
|
||||
|
||||
* [AngularJS][ngJS]: [angular/angular.js](http://github.com/angular/angular.js).
|
||||
* [AngularDart][ngDart]: [angular/angular.dart](http://github.com/angular/angular.dart).
|
||||
|
||||
|
||||
# Angular
|
||||
## Setup & Install Angular 2
|
||||
|
||||
Angular is a development platform for building mobile and desktop web applications using TypeScript/JavaScript and other languages.
|
||||
Follow the instructions given on the [Angular download page][download].
|
||||
|
||||
## Quickstart
|
||||
|
||||
[Get started in 5 minutes][quickstart].
|
||||
|
||||
## Changelog
|
||||
|
||||
[Learn about the latest improvements][changelog].
|
||||
|
||||
## Want to help?
|
||||
|
||||
Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our
|
||||
guidelines for [contributing][contributing] and then check out one of our issues in the [hotlist: community-help](https://github.com/angular/angular/labels/hotlist%3A%20community-help).
|
||||
Want to file a bug, or contribute some code or improve documentation? Excellent! Read up on our
|
||||
guidelines for [contributing][contributing].
|
||||
|
||||
[contributing]: https://github.com/angular/angular/blob/master/CONTRIBUTING.md
|
||||
[quickstart]: https://angular.io/start
|
||||
[changelog]: https://github.com/angular/angular/blob/master/CHANGELOG.md
|
||||
[ng]: https://angular.io
|
||||
|
||||
## Examples
|
||||
|
||||
To see the examples, first build the project as described
|
||||
[here](http://github.com/angular/angular/blob/master/DEVELOPER.md).
|
||||
|
||||
### Hello World Example
|
||||
|
||||
This example consists of three basic pieces - a component, a decorator and a
|
||||
service. They are all constructed via injection. For more information see the
|
||||
comments in the source `modules/examples/src/hello_world/index.js`.
|
||||
|
||||
You can build this example as either JS or Dart app:
|
||||
|
||||
* JS:
|
||||
* `$(npm bin)/gulp serve.js.dev`, and
|
||||
* open `localhost:8000/examples/src/hello_world/` in Chrome.
|
||||
* Dart:
|
||||
* `$(npm bin)/gulp serve/examples.dart`, and
|
||||
* open `localhost:8080/src/hello_world` in Chrome (for dart2js) or
|
||||
[Dartium][dartium] (for Dart VM).
|
||||
|
||||
[contributing]: http://github.com/angular/angular/blob/master/CONTRIBUTING.md
|
||||
[dart]: http://www.dartlang.org
|
||||
[dartium]: http://www.dartlang.org/tools/dartium
|
||||
[download]: http://angular.io/download
|
||||
[ng2]: http://angular.io
|
||||
[ngDart]: http://angulardart.org
|
||||
[ngJS]: http://angularjs.org
|
||||
|
107 WORKSPACE
@ -1,107 +0,0 @@
|
||||
workspace(
|
||||
name = "angular",
|
||||
managed_directories = {"@npm": ["node_modules"]},
|
||||
)
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
# Fetch rules_nodejs so we can install our npm dependencies
|
||||
http_archive(
|
||||
name = "build_bazel_rules_nodejs",
|
||||
sha256 = "f9e7b9f42ae202cc2d2ce6d698ccb49a9f7f7ea572a78fd451696d03ef2ee116",
|
||||
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.6.0/rules_nodejs-1.6.0.tar.gz"],
|
||||
)
|
||||
|
||||
# Check the rules_nodejs version and download npm dependencies
|
||||
# Note: bazel (version 2 and after) will check the .bazelversion file so we don't need to
|
||||
# assert on that.
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "check_rules_nodejs_version", "node_repositories", "yarn_install")
|
||||
|
||||
check_rules_nodejs_version(minimum_version_string = "1.6.0")
|
||||
|
||||
# Setup the Node.js toolchain
|
||||
node_repositories(
|
||||
node_repositories = {
|
||||
"12.14.1-darwin_amd64": ("node-v12.14.1-darwin-x64.tar.gz", "node-v12.14.1-darwin-x64", "0be10a28737527a1e5e3784d3ad844d742fe8b0718acd701fd48f718fd3af78f"),
|
||||
"12.14.1-linux_amd64": ("node-v12.14.1-linux-x64.tar.xz", "node-v12.14.1-linux-x64", "07cfcaa0aa9d0fcb6e99725408d9e0b07be03b844701588e3ab5dbc395b98e1b"),
|
||||
"12.14.1-windows_amd64": ("node-v12.14.1-win-x64.zip", "node-v12.14.1-win-x64", "1f96ccce3ba045ecea3f458e189500adb90b8bc1a34de5d82fc10a5bf66ce7e3"),
|
||||
},
|
||||
node_version = "12.14.1",
|
||||
package_json = ["//:package.json"],
|
||||
)
|
||||
|
||||
load("//integration:angular_integration_test.bzl", "npm_package_archives")
|
||||
|
||||
yarn_install(
|
||||
name = "npm",
|
||||
manual_build_file_contents = npm_package_archives(),
|
||||
package_json = "//:package.json",
|
||||
yarn_lock = "//:yarn.lock",
|
||||
)
|
||||
|
||||
# Install all bazel dependencies of the @npm npm packages
|
||||
load("@npm//:install_bazel_dependencies.bzl", "install_bazel_dependencies")
|
||||
|
||||
install_bazel_dependencies()
|
||||
|
||||
# Load angular dependencies
|
||||
load("//packages/bazel:package.bzl", "rules_angular_dev_dependencies")
|
||||
|
||||
rules_angular_dev_dependencies()
|
||||
|
||||
# Load protractor dependencies
|
||||
load("@npm_bazel_protractor//:package.bzl", "npm_bazel_protractor_dependencies")
|
||||
|
||||
npm_bazel_protractor_dependencies()
|
||||
|
||||
# Load karma dependencies
|
||||
load("@npm_bazel_karma//:package.bzl", "npm_bazel_karma_dependencies")
|
||||
|
||||
npm_bazel_karma_dependencies()
|
||||
|
||||
# Setup the rules_webtesting toolchain
|
||||
load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories")
|
||||
|
||||
web_test_repositories()
|
||||
|
||||
load("//tools/browsers:browser_repositories.bzl", "browser_repositories")
|
||||
|
||||
browser_repositories()
|
||||
|
||||
# Setup the rules_typescript toolchain
|
||||
load("@npm_bazel_typescript//:index.bzl", "ts_setup_workspace")
|
||||
|
||||
ts_setup_workspace()
|
||||
|
||||
# Setup the rules_sass toolchain
|
||||
load("@io_bazel_rules_sass//sass:sass_repositories.bzl", "sass_repositories")
|
||||
|
||||
sass_repositories()
|
||||
|
||||
# Setup the skydoc toolchain
|
||||
load("@io_bazel_skydoc//skylark:skylark.bzl", "skydoc_repositories")
|
||||
|
||||
skydoc_repositories()
|
||||
|
||||
load("@bazel_toolchains//rules:environments.bzl", "clang_env")
|
||||
load("@bazel_toolchains//rules:rbe_repo.bzl", "rbe_autoconfig")
|
||||
|
||||
rbe_autoconfig(
|
||||
name = "rbe_ubuntu1604_angular",
|
||||
# Need to specify a base container digest in order to ensure that we can use the checked-in
|
||||
# platform configurations for the "ubuntu16_04" image. Otherwise the autoconfig rule would
|
||||
# need to pull the image and run it in order to determine the toolchain configuration. See:
|
||||
# https://github.com/bazelbuild/bazel-toolchains/blob/1.1.2/configs/ubuntu16_04_clang/versions.bzl
|
||||
base_container_digest = "sha256:1ab40405810effefa0b2f45824d6d608634ccddbf06366760c341ef6fbead011",
|
||||
# Note that if you change the `digest`, you might also need to update the
|
||||
# `base_container_digest` to make sure marketplace.gcr.io/google/rbe-ubuntu16-04-webtest:<digest>
|
||||
# and marketplace.gcr.io/google/rbe-ubuntu16-04:<base_container_digest> have
|
||||
# the same Clang and JDK installed. Clang is needed because of the dependency on
|
||||
# @com_google_protobuf. Java is needed for the Bazel's test executor Java tool.
|
||||
digest = "sha256:0b8fa87db4b8e5366717a7164342a029d1348d2feea7ecc4b18c780bc2507059",
|
||||
env = clang_env(),
|
||||
registry = "marketplace.gcr.io",
|
||||
# We can't use the default "ubuntu16_04" RBE image provided by the autoconfig because we need
|
||||
# a specific Linux kernel that comes with "libx11" in order to run headless browser tests.
|
||||
repository = "google/rbe-ubuntu16-04-webtest",
|
||||
)
|
@ -1,12 +0,0 @@
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries

# Googlebot uses an older version of Chrome
# For additional information see: https://developers.google.com/search/docs/guides/rendering

> 0.5%
last 2 major versions
Firefox ESR
not dead
IE 11
48
aio/.gitignore
vendored
@ -1,48 +0,0 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.

# compiled output
/dist
/out-tsc
/src/generated
/tmp

# dependencies
/node_modules

# IDEs and editors
/.idea
.project
.classpath
.c9/
*.launch
.settings/
*.sublime-workspace

# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json

# misc
/.firebase/
/.sass-cache
/connect.lock
/coverage
/libpeerconnection.log
debug.log
firebase-debug.log
npm-debug.log
testem.log
/typings
yarn-error.log

# e2e
/e2e/*.js
/e2e/*.map
protractor-results*.txt

# System Files
.DS_Store
Thumbs.db
140
aio/README.md
@ -1,140 +0,0 @@
# Angular documentation project (https://angular.io)

Everything in this folder is part of the documentation project. This includes:

* the web site for displaying the documentation
* the dgeni configuration for converting source files to rendered files that can be viewed in the web site
* the tooling for setting up examples for development, and for generating live-example and zip files from the examples

## Developer tasks

We use [Yarn](https://yarnpkg.com) to manage the dependencies and to run build tasks.
You should run all these tasks from the `angular/aio` folder.
Here are the most important tasks you might need to use:

* `yarn` - install all the dependencies.
* `yarn setup` - install all the dependencies, boilerplate, stackblitz, zips and run dgeni on the docs.
* `yarn setup-local` - same as `setup`, but build the Angular packages from the source code and use these locally built versions (instead of the ones fetched from npm) for aio and docs examples boilerplate.

* `yarn build` - create a production build of the application (after installing dependencies, boilerplate, etc).
* `yarn build-local` - same as `build`, but use `setup-local` instead of `setup`.
* `yarn build-local-with-viewengine` - same as `build-local`, but in addition also turns on `ViewEngine` (pre-Ivy) mode in aio.
  (Note: To turn on `ViewEngine` mode in docs examples, see `yarn boilerplate:add:viewengine` below.)

* `yarn start` - run a development web server that watches the files, then builds the doc-viewer and reloads the page as necessary.
* `yarn serve-and-sync` - run both `docs-watch` and `start` in the same console.
* `yarn lint` - check that the doc-viewer code follows our style rules.
* `yarn test` - watch all the source files for the doc-viewer and re-run all the unit tests when any of them change.
* `yarn test --watch=false` - run all the unit tests once.
* `yarn e2e` - run all the e2e tests for the doc-viewer.

* `yarn docs` - generate all the docs from the source files.
* `yarn docs-watch` - watch the Angular source and the docs files and run a short-circuited doc-gen for the docs that changed.
* `yarn docs-lint` - check that the doc-gen code follows our style rules.
* `yarn docs-test` - run the unit tests for the doc generation code.

* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally.
* `yarn boilerplate:add:viewengine` - same as `boilerplate:add` but also turns on `ViewEngine` (pre-Ivy) mode.

* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
* `yarn generate-stackblitz` - generate the stackblitz files that are used by the `live-example` tags in the docs.
* `yarn generate-zips` - generate the zip files from the examples. Zips are available via the `live-example` tags in the docs.

* `yarn example-e2e` - run all e2e tests for examples (see the usage sketch below). Available options:
  - `--setup`: generate boilerplate, force webdriver update & other setup, then run tests.
  - `--local`: run e2e tests with the local version of Angular contained in the "dist" folder.
    _Requires `--setup` in order to take effect._
  - `--viewengine`: run e2e tests in `ViewEngine` (pre-Ivy) mode.
  - `--filter=foo`: limit e2e tests to those containing the word "foo".
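
  For example, the following combination regenerates the boilerplate and then runs the example e2e tests against the locally built Angular packages. The `--filter` value here is only an illustration; substitute any word that matches the tests you care about:

  ```bash
  yarn example-e2e --setup --local --filter=http
  ```
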
> **Note for Windows users**
>
> Setting up the examples involves creating some [symbolic links](https://en.wikipedia.org/wiki/Symbolic_link) (see [here](./tools/examples/README.md#symlinked-node_modules) for details). On Windows, this requires either having [Developer Mode enabled](https://blogs.windows.com/windowsdeveloper/2016/12/02/symlinks-windows-10) (supported on Windows 10 or newer) or running the setup commands as administrator.
>
> The affected commands are:
> - `yarn setup` / `yarn setup-*`
> - `yarn build` / `yarn build-*`
> - `yarn boilerplate:add`
> - `yarn example-e2e --setup`

## Using ServiceWorker locally

Running `yarn start` (even when explicitly targeting production mode) does not set up the
ServiceWorker. If you want to test the ServiceWorker locally, you can use `yarn build` and then
serve the files in `dist/` with `yarn http-server dist -p 4200`.

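For example, a minimal sketch of that flow (assuming port 4200 is free):

```bash
yarn build
yarn http-server dist -p 4200
```
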
## Guide to authoring

There are two types of content in the documentation:

* **API docs**: descriptions of the modules, classes, interfaces, decorators, etc. that make up the Angular platform.
  API docs are generated directly from the source code.
  The source code is contained in TypeScript files, located in the `angular/packages` folder.
  Each API item may have a preceding comment, which contains JSDoc style tags and content.
  The content is written in markdown.

* **Other content**: guides, tutorials, and other marketing material.
  All other content is written using markdown in text files, located in the `angular/aio/content` folder.
  More specifically, there are sub-folders that contain particular types of content: guides, tutorials and marketing.

* **Code examples**: code examples need to be testable to ensure their accuracy.
  Also, our examples have a specific look and feel and allow the user to copy the source code. Larger
  examples are rendered in a tabbed interface (e.g. template, HTML, and TypeScript on separate
  tabs). Additionally, some are live examples, which provide links where the code can be edited, executed, and/or downloaded. For details on working with code examples, please read the [Code snippets](https://angular.io/guide/docs-style-guide#code-snippets), [Source code markup](https://angular.io/guide/docs-style-guide#source-code-markup), and [Live examples](https://angular.io/guide/docs-style-guide#live-examples) pages of the [Authors Style Guide](https://angular.io/guide/docs-style-guide).

We use the [dgeni](https://github.com/angular/dgeni) tool to convert these files into docs that can be viewed in the doc-viewer.

The [Authors Style Guide](https://angular.io/guide/docs-style-guide) prescribes guidelines for
writing guide pages, explains how to use the documentation classes and components, and how to mark up sample source code to produce code snippets.

### Generating the complete docs

The main task for generating the docs is `yarn docs`. This will process all the source files (API and other),
extracting the documentation and generating JSON files that can be consumed by the doc-viewer.

### Partial doc generation for editors

Full doc generation can take up to one minute. That's too slow for efficient document creation and editing.

You can make small changes in a smart editor that displays formatted markdown:
> In VS Code, _Cmd-K, V_ opens a markdown preview in a side pane; _Cmd-B_ toggles the left sidebar.

You also want to see those changes displayed properly in the doc-viewer
with a quick edit/view cycle time.

For this purpose, use the `yarn docs-watch` task, which watches for changes to source files and only
re-processes the files necessary to generate the docs that are related to the file that has changed.
Since this task takes shortcuts, it is much faster (often less than 1 second) but it won't produce full
fidelity content. In particular, links to other docs and embedded code examples may not always render
correctly.

The general setup is as follows:

* Open a terminal, ensure the dependencies are installed, run an initial doc generation, then start the doc-viewer:

  ```bash
  yarn setup
  yarn start
  ```

* Open a second terminal and start watching the docs:

  ```bash
  yarn docs-watch
  ```

> Alternatively, try the consolidated `serve-and-sync` command that builds, watches and serves in the same terminal window:

  ```bash
  yarn serve-and-sync
  ```

* Open a browser at https://localhost:4200/ and navigate to the document on which you want to work.
  You can automatically open the browser by using `yarn start -o` in the first terminal.

* Make changes to the page's associated doc or example files. Every time a file is saved, the doc will
  be regenerated, the app will rebuild and the page will reload.

* If you get a build error complaining about examples or any other odd behavior, be sure to consult
  the [Authors Style Guide](https://angular.io/guide/docs-style-guide).
@ -1,3 +0,0 @@
|
||||
scripts-js/lib
|
||||
scripts-js/node_modules
|
||||
scripts-js/**/test
|
@ -1,179 +0,0 @@
|
||||
# Image metadata and config
|
||||
FROM debian:buster
|
||||
|
||||
LABEL name="angular.io PR preview" \
|
||||
description="This image implements the PR preview functionality for angular.io." \
|
||||
vendor="Angular" \
|
||||
version="1.0"
|
||||
|
||||
VOLUME /aio-secrets
|
||||
VOLUME /var/www/aio-builds
|
||||
VOLUME /dockerbuild
|
||||
|
||||
EXPOSE 80 443
|
||||
|
||||
|
||||
# Build-time args and env vars
|
||||
# The AIO_ARTIFACT_PATH path needs to be kept in sync with the value of the
|
||||
# `aio_preview->steps->store_artifacts->destination` property in `.circleci/config.yml`
|
||||
ARG AIO_ARTIFACT_PATH=aio/dist/aio-snapshot.tgz
|
||||
ARG TEST_AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH
|
||||
ARG AIO_BUILDS_DIR=/var/www/aio-builds
|
||||
ARG TEST_AIO_BUILDS_DIR=/tmp/aio-builds
|
||||
ARG AIO_DOMAIN_NAME=ngbuilds.io
|
||||
ARG TEST_AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME.localhost
|
||||
ARG AIO_GITHUB_ORGANIZATION=angular
|
||||
ARG TEST_AIO_GITHUB_ORGANIZATION=test-org
|
||||
ARG AIO_GITHUB_REPO=angular
|
||||
ARG TEST_AIO_GITHUB_REPO=test-repo
|
||||
ARG AIO_GITHUB_TEAM_SLUGS=aio-auto-previews,aio-contributors
|
||||
ARG TEST_AIO_GITHUB_TEAM_SLUGS=test-team-1,test-team-2
|
||||
ARG AIO_NGINX_HOSTNAME=$AIO_DOMAIN_NAME
|
||||
ARG TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_DOMAIN_NAME
|
||||
ARG AIO_NGINX_PORT_HTTP=80
|
||||
ARG TEST_AIO_NGINX_PORT_HTTP=8080
|
||||
ARG AIO_NGINX_PORT_HTTPS=443
|
||||
ARG TEST_AIO_NGINX_PORT_HTTPS=4433
|
||||
ARG AIO_SIGNIFICANT_FILES_PATTERN='^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)'
|
||||
ARG TEST_AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN
|
||||
ARG AIO_TRUSTED_PR_LABEL="aio: preview"
|
||||
ARG TEST_AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL
|
||||
ARG AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
|
||||
ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME
|
||||
ARG AIO_ARTIFACT_MAX_SIZE=26214400
|
||||
ARG TEST_AIO_ARTIFACT_MAX_SIZE=200
|
||||
ARG AIO_PREVIEW_SERVER_PORT=3000
|
||||
ARG TEST_AIO_PREVIEW_SERVER_PORT=3001
|
||||
|
||||
ENV AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH TEST_AIO_ARTIFACT_PATH=$TEST_AIO_ARTIFACT_PATH \
|
||||
AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
|
||||
AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
|
||||
AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
|
||||
AIO_GITHUB_REPO=$AIO_GITHUB_REPO TEST_AIO_GITHUB_REPO=$TEST_AIO_GITHUB_REPO \
|
||||
AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
|
||||
AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
|
||||
AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
|
||||
AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
|
||||
AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
|
||||
AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
|
||||
AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
|
||||
AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
|
||||
AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN TEST_AIO_SIGNIFICANT_FILES_PATTERN=$TEST_AIO_SIGNIFICANT_FILES_PATTERN \
|
||||
AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
|
||||
AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME TEST_AIO_PREVIEW_SERVER_HOSTNAME=$TEST_AIO_PREVIEW_SERVER_HOSTNAME \
|
||||
AIO_ARTIFACT_MAX_SIZE=$AIO_ARTIFACT_MAX_SIZE TEST_AIO_ARTIFACT_MAX_SIZE=$TEST_AIO_ARTIFACT_MAX_SIZE \
|
||||
AIO_PREVIEW_SERVER_PORT=$AIO_PREVIEW_SERVER_PORT TEST_AIO_PREVIEW_SERVER_PORT=$TEST_AIO_PREVIEW_SERVER_PORT \
|
||||
AIO_WWW_USER=www-data \
|
||||
NODE_ENV=production
|
||||
|
||||
|
||||
# Create directory for logs
|
||||
RUN mkdir /var/log/aio
|
||||
|
||||
|
||||
# Add extra package sources
|
||||
RUN apt-get update -y && apt-get install -y curl=7.64.0-4+deb10u1
|
||||
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_12.x | bash -
|
||||
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
|
||||
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
|
||||
|
||||
|
||||
# Install packages
|
||||
# NOTE: Some packages (such as `nginx`, `nodejs`, `openssl`) make older versions unavailable on the
|
||||
# repositories, so we cannot pin to specific versions for these packages :(
|
||||
# See for example:
|
||||
# - https://github.com/nodesource/distributions/issues/33
|
||||
# - https://askubuntu.com/questions/715104/how-can-i-downgrade-openssl-via-apt-get
|
||||
RUN apt-get update -y && apt-get install -y \
|
||||
cron=3.0pl1-134+deb10u1 \
|
||||
dnsmasq=2.80-1 \
|
||||
nano=3.2-3 \
|
||||
nginx \
|
||||
nodejs \
|
||||
openssl \
|
||||
rsyslog=8.1901.0-1 \
|
||||
vim=2:8.1.0875-5 \
|
||||
yarn=1.22.4-1
|
||||
RUN yarn global add pm2@4.4.0
|
||||
|
||||
|
||||
# Set up log rotation
|
||||
COPY logrotate/* /etc/logrotate.d/
|
||||
RUN chmod 0644 /etc/logrotate.d/*
|
||||
|
||||
|
||||
# Set up cronjobs
|
||||
COPY cronjobs/aio-builds-cleanup /etc/cron.d/
|
||||
RUN chmod 0744 /etc/cron.d/aio-builds-cleanup
|
||||
RUN crontab /etc/cron.d/aio-builds-cleanup
|
||||
RUN printenv | grep AIO_ >> /etc/environment
|
||||
|
||||
|
||||
# Set up dnsmasq
|
||||
COPY dnsmasq/dnsmasq.conf /etc/
|
||||
RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
|
||||
|
||||
# Set up SSL/TLS certificates
|
||||
COPY nginx/create-selfsigned-cert.sh /tmp/
|
||||
RUN chmod a+x /tmp/create-selfsigned-cert.sh
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-prod" "$AIO_NGINX_HOSTNAME" "$AIO_LOCALCERTS_DIR"
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-test" "$TEST_AIO_NGINX_HOSTNAME" "$TEST_AIO_LOCALCERTS_DIR"
|
||||
RUN rm /tmp/create-selfsigned-cert.sh
|
||||
RUN update-ca-certificates
|
||||
|
||||
|
||||
# Set up nginx (for production and testing)
|
||||
RUN sed -i -E "s|^user\s+\S+;|user $AIO_WWW_USER;|" /etc/nginx/nginx.conf
|
||||
RUN rm -f /etc/nginx/conf.d/*
|
||||
RUN rm -f /etc/nginx/sites-enabled/*
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$TEST_AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$TEST_AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$TEST_AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
|
||||
|
||||
# Set up pm2
|
||||
RUN pm2 startup --user root > /dev/null
|
||||
|
||||
|
||||
# Set up the shell scripts
|
||||
COPY scripts-sh/ $AIO_SCRIPTS_SH_DIR/
|
||||
RUN chmod a+x $AIO_SCRIPTS_SH_DIR/*
|
||||
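# Expose each shell script as an `aio-<name>` command (file extension stripped) on the PATH.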
RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \
|
||||
| while read file; do ln -s $AIO_SCRIPTS_SH_DIR/$file /usr/local/bin/aio-${file%.*}; done
|
||||
|
||||
|
||||
# Set up the Node.js scripts
|
||||
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
|
||||
RUN yarn --cwd="$AIO_SCRIPTS_JS_DIR/" install --production --frozen-lockfile
|
||||
|
||||
|
||||
# Set up health check
|
||||
HEALTHCHECK --interval=5m CMD /usr/local/bin/aio-health-check
|
||||
|
||||
|
||||
# Go!
|
||||
WORKDIR /
|
||||
CMD aio-init && tail -f /dev/null
|
@ -1,2 +0,0 @@
|
||||
# Periodically clean up builds that do not correspond to currently open PRs
|
||||
0 12 * * * /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1
|
@ -1,16 +0,0 @@
|
||||
# Do not read /etc/resolv.conf. Get servers from this file instead.
|
||||
no-resolv
|
||||
server=8.8.8.8
|
||||
server=8.8.4.4
|
||||
|
||||
# Listen for DHCP and DNS requests only on this address.
|
||||
listen-address=127.0.0.1
|
||||
|
||||
# Force an IP address for these domains.
|
||||
address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
|
||||
|
||||
# Run as root (required from inside docker container).
|
||||
user=root
|
@ -1,9 +0,0 @@
|
||||
/var/log/aio/clean-up.log /var/log/aio/init.log /var/log/aio/verify-setup.log {
|
||||
compress
|
||||
create
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
/var/log/aio/nginx/*.log /var/log/aio/nginx-test/*.log {
|
||||
compress
|
||||
create
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
sharedscripts
|
||||
postrotate
|
||||
service nginx rotate >/dev/null 2>&1
|
||||
endscript
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
/var/log/aio/preview-server-*.log {
|
||||
compress
|
||||
copytruncate
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
}
|
@ -1,123 +0,0 @@
|
||||
# Redirect all HTTP traffic to HTTPS
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTP}} default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTP}};
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Ideally we want 308 (permanent + keep original method),
|
||||
# but it is relatively new and not supported by some clients (e.g. cURL).
|
||||
return 307 https://$host:{{$AIO_NGINX_PORT_HTTPS}}$request_uri;
|
||||
}
|
||||
|
||||
# Serve PR-preview requests
|
||||
server {
|
||||
server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{7,40})\.";
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
|
||||
|
||||
root {{$AIO_BUILDS_DIR}}/$pr/$sha;
|
||||
disable_symlinks on from=$document_root;
|
||||
index index.html;
|
||||
|
||||
gzip on;
|
||||
gzip_comp_level 7;
|
||||
gzip_types *;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
error_page 404 /404.html;
|
||||
location "=/404.html" {
|
||||
internal;
|
||||
}
|
||||
|
||||
location "~/[^/]+\.[^/]+$" {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html =404;
|
||||
}
|
||||
}
|
||||
|
||||
# Handle all other requests
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2 default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Health check
|
||||
location "~^/health-check/?$" {
|
||||
add_header Content-Type text/plain;
|
||||
return 200 '';
|
||||
}
|
||||
|
||||
# Check PRs previewability
|
||||
location "~^/can-have-public-preview/\d+/?$" {
|
||||
if ($request_method != "GET") {
|
||||
add_header Allow "GET";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method GET;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Notify about CircleCI builds
|
||||
location "~^/circle-build/?$" {
|
||||
if ($request_method != "POST") {
|
||||
add_header Allow "POST";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method POST;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Notify about PR changes
|
||||
location "~^/pr-updated/?$" {
|
||||
if ($request_method != "POST") {
|
||||
add_header Allow "POST";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method POST;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Everything else
|
||||
location / {
|
||||
return 404;
|
||||
}
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -eu -o pipefail
|
||||
|
||||
|
||||
# Variables
|
||||
confFile=/tmp/$1.conf
|
||||
domainName=$2
|
||||
outDir=$3
|
||||
|
||||
|
||||
# Create certificate
|
||||
cp /etc/ssl/openssl.cnf "$confFile"
|
||||
echo "[subjectAltName]" >> "$confFile"
|
||||
echo "subjectAltName = DNS:$domainName, DNS:*.$domainName" >> "$confFile"
|
||||
mkdir -p $outDir
|
||||
openssl req -days 365 -newkey rsa:2048 -nodes -sha256 -x509 \
|
||||
-config "$confFile" -extensions subjectAltName -subj "/CN=$domainName" \
|
||||
-out "$outDir/$domainName.crt" -keyout "$outDir/$domainName.key"
|
||||
chmod -R 400 "$outDir"
|
||||
cp "$outDir/$domainName.crt" /usr/local/share/ca-certificates
|
@ -1,2 +0,0 @@
|
||||
/dist
|
||||
/node_modules
|
@ -1,128 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {GithubApi} from '../common/github-api';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {assertNotMissingOrEmpty, getPrInfoFromDownloadPath, Logger} from '../common/utils';
|
||||
|
||||
// Classes
|
||||
export class BuildCleaner {
|
||||
|
||||
private logger = new Logger('BuildCleaner');
|
||||
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string, protected githubOrg: string, protected githubRepo: string,
|
||||
protected githubToken: string, protected downloadsDir: string, protected artifactPath: string) {
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
assertNotMissingOrEmpty('downloadsDir', downloadsDir);
|
||||
assertNotMissingOrEmpty('artifactPath', artifactPath);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public async cleanUp(): Promise<void> {
|
||||
try {
|
||||
this.logger.log('Cleaning up builds and downloads');
|
||||
const openPrs = await this.getOpenPrNumbers();
|
||||
this.logger.log(`Open pull requests: ${openPrs.length}`);
|
||||
await Promise.all([
|
||||
this.cleanBuilds(openPrs),
|
||||
this.cleanDownloads(openPrs),
|
||||
]);
|
||||
} catch (error) {
|
||||
this.logger.error('ERROR:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public async cleanBuilds(openPrs: number[]): Promise<void> {
|
||||
const existingBuilds = await this.getExistingBuildNumbers();
|
||||
await this.removeUnnecessaryBuilds(existingBuilds, openPrs);
|
||||
}
|
||||
|
||||
public async cleanDownloads(openPrs: number[]): Promise<void> {
|
||||
const existingDownloads = await this.getExistingDownloads();
|
||||
await this.removeUnnecessaryDownloads(existingDownloads, openPrs);
|
||||
}
|
||||
|
||||
public getExistingBuildNumbers(): Promise<number[]> {
|
||||
return new Promise<number[]>((resolve, reject) => {
|
||||
fs.readdir(this.buildsDir, (err, files) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
const buildNumbers = files.
|
||||
map(name => name.replace(HIDDEN_DIR_PREFIX, '')). // Remove the "hidden dir" prefix
|
||||
map(Number). // Convert string to number
|
||||
filter(Boolean); // Ignore NaN (or 0), because they are not builds
|
||||
|
||||
resolve(buildNumbers);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public async getOpenPrNumbers(): Promise<number[]> {
|
||||
const api = new GithubApi(this.githubToken);
|
||||
const githubPullRequests = new GithubPullRequests(api, this.githubOrg, this.githubRepo);
|
||||
const prs = await githubPullRequests.fetchAll('open');
|
||||
return prs.map(pr => pr.number);
|
||||
}
|
||||
|
||||
public removeDir(dir: string): void {
|
||||
try {
|
||||
if (shell.test('-d', dir)) {
|
||||
shell.chmod('-R', 'a+w', dir);
|
||||
shell.rm('-rf', dir);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(`ERROR: Unable to remove '${dir}' due to:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
public removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]): void {
|
||||
const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));
|
||||
|
||||
this.logger.log(`Existing builds: ${existingBuildNumbers.length}`);
|
||||
this.logger.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
|
||||
|
||||
// Try removing public dirs.
|
||||
toRemove.
|
||||
map(num => path.join(this.buildsDir, String(num))).
|
||||
forEach(dir => this.removeDir(dir));
|
||||
|
||||
// Try removing hidden dirs.
|
||||
toRemove.
|
||||
map(num => path.join(this.buildsDir, HIDDEN_DIR_PREFIX + String(num))).
|
||||
forEach(dir => this.removeDir(dir));
|
||||
}
|
||||
|
||||
public getExistingDownloads(): Promise<string[]> {
|
||||
const artifactFile = path.basename(this.artifactPath);
|
||||
return new Promise<string[]>((resolve, reject) => {
|
||||
fs.readdir(this.downloadsDir, (err, files) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
files = files.filter(file => file.endsWith(artifactFile));
|
||||
resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public removeUnnecessaryDownloads(existingDownloads: string[], openPrNumbers: number[]): void {
|
||||
const toRemove = existingDownloads.filter(filePath => {
|
||||
const {pr} = getPrInfoFromDownloadPath(filePath);
|
||||
return !openPrNumbers.includes(pr);
|
||||
});
|
||||
|
||||
this.logger.log(`Existing downloads: ${existingDownloads.length}`);
|
||||
this.logger.log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);
|
||||
|
||||
toRemove.forEach(filePath => shell.rm(path.join(this.downloadsDir, filePath)));
|
||||
}
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {
|
||||
AIO_ARTIFACT_PATH,
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TOKEN,
|
||||
} from '../common/env-variables';
|
||||
import {BuildCleaner} from './build-cleaner';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main(): void {
|
||||
const buildCleaner = new BuildCleaner(
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TOKEN,
|
||||
AIO_DOWNLOADS_DIR,
|
||||
AIO_ARTIFACT_PATH);
|
||||
|
||||
buildCleaner.cleanUp().catch(() => process.exit(1));
|
||||
}
|
@ -1,90 +0,0 @@
|
||||
// Imports
|
||||
import fetch from 'node-fetch';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
// Constants
|
||||
const CIRCLE_CI_API_URL = 'https://circleci.com/api/v1.1/project/github';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface ArtifactInfo {
|
||||
path: string;
|
||||
pretty_path: string;
|
||||
node_index: number;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export type ArtifactResponse = ArtifactInfo[];
|
||||
|
||||
export interface BuildInfo {
|
||||
reponame: string;
|
||||
failed: boolean;
|
||||
branch: string;
|
||||
username: string;
|
||||
build_num: number;
|
||||
has_artifacts: boolean;
|
||||
outcome: string; // e.g. 'success'
|
||||
vcs_revision: string; // HEAD SHA
|
||||
// there are other fields but they are not used in this code
|
||||
}
|
||||
|
||||
/**
|
||||
* A Helper that can interact with the CircleCI API.
|
||||
*/
|
||||
export class CircleCiApi {
|
||||
|
||||
private tokenParam = `circle-token=${this.circleCiToken}`;
|
||||
|
||||
/**
|
||||
* Construct a helper that can interact with the CircleCI REST API.
|
||||
* @param githubOrg The Github organisation whose repos we want to access in CircleCI (e.g. angular).
|
||||
* @param githubRepo The Github repo whose builds we want to access in CircleCI (e.g. angular).
|
||||
* @param circleCiToken The CircleCI API access token (secret).
|
||||
*/
|
||||
constructor(
|
||||
private githubOrg: string,
|
||||
private githubRepo: string,
|
||||
private circleCiToken: string,
|
||||
) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
assertNotMissingOrEmpty('circleCiToken', circleCiToken);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the info for a build from the CircleCI API
|
||||
* @param buildNumber The CircleCI build number that generated the artifact.
|
||||
* @returns A promise to the info about the build
|
||||
*/
|
||||
public async getBuildInfo(buildNumber: number): Promise<BuildInfo> {
|
||||
try {
|
||||
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
|
||||
const response = await fetch(`${baseUrl}?${this.tokenParam}`);
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`${baseUrl}: ${response.status} - ${response.statusText}`);
|
||||
}
|
||||
return response.json();
|
||||
} catch (error) {
|
||||
throw new Error(`CircleCI build info request failed (${error.message})`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Query the CircleCI API to get a URL for a specified artifact from a specified build.
|
||||
* @param artifactPath The path, within the build, to the artifact.
|
||||
* @returns A promise to the URL that can be requested to download the actual build artifact file.
|
||||
*/
|
||||
public async getBuildArtifactUrl(buildNumber: number, artifactPath: string): Promise<string> {
|
||||
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/artifacts?${this.tokenParam}`);
|
||||
const artifacts = await response.json() as ArtifactResponse;
|
||||
const artifact = artifacts.find(item => item.path === artifactPath);
|
||||
if (!artifact) {
|
||||
throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNumber}`);
|
||||
}
|
||||
return artifact.url;
|
||||
} catch (error) {
|
||||
throw new Error(`CircleCI artifact URL request failed (${error.message})`);
|
||||
}
|
||||
}
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
// Constants
|
||||
export const AIO_DOWNLOADS_DIR = '/tmp/aio-downloads';
|
||||
export const HIDDEN_DIR_PREFIX = 'hidden--';
|
||||
export const SHORT_SHA_LEN = 7;
|
@ -1,19 +0,0 @@
|
||||
import {getEnvVar} from './utils';
|
||||
|
||||
export const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
|
||||
export const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
|
||||
export const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
export const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
|
||||
export const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
|
||||
export const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
export const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
|
||||
export const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
|
||||
export const AIO_NGINX_HOSTNAME = getEnvVar('AIO_NGINX_HOSTNAME');
|
||||
export const AIO_NGINX_PORT_HTTP = +getEnvVar('AIO_NGINX_PORT_HTTP');
|
||||
export const AIO_NGINX_PORT_HTTPS = +getEnvVar('AIO_NGINX_PORT_HTTPS');
|
||||
export const AIO_SIGNIFICANT_FILES_PATTERN = getEnvVar('AIO_SIGNIFICANT_FILES_PATTERN');
|
||||
export const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
export const AIO_PREVIEW_SERVER_HOSTNAME = getEnvVar('AIO_PREVIEW_SERVER_HOSTNAME');
|
||||
export const AIO_PREVIEW_SERVER_PORT = +getEnvVar('AIO_PREVIEW_SERVER_PORT');
|
||||
export const AIO_ARTIFACT_MAX_SIZE = +getEnvVar('AIO_ARTIFACT_MAX_SIZE');
|
||||
export const AIO_WWW_USER = getEnvVar('AIO_WWW_USER');
|
@ -1,111 +0,0 @@
|
||||
// Imports
|
||||
import {IncomingMessage} from 'http';
|
||||
import * as https from 'https';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
// Constants
|
||||
const GITHUB_HOSTNAME = 'api.github.com';
|
||||
|
||||
// Interfaces - Types
|
||||
interface RequestParams {
|
||||
[key: string]: string | number;
|
||||
}
|
||||
|
||||
type RequestParamsOrNull = RequestParams | null;
|
||||
|
||||
// Classes
|
||||
export class GithubApi {
|
||||
protected requestHeaders: {[key: string]: string};
|
||||
|
||||
// Constructor
|
||||
constructor(githubToken: string) {
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
|
||||
this.requestHeaders = {
|
||||
'Authorization': `token ${githubToken}`,
|
||||
'User-Agent': `Node/${process.versions.node}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public get<T = any>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('get', path);
|
||||
}
|
||||
|
||||
public post<T = any>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('post', path, data);
|
||||
}
|
||||
|
||||
// In GitHub API paginated requests, page numbering is 1-based. (https://developer.github.com/v3/#pagination)
|
||||
public getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 1): Promise<T[]> {
|
||||
const perPage = 100;
|
||||
const params = {
|
||||
...baseParams,
|
||||
page: currentPage,
|
||||
per_page: perPage,
|
||||
};
|
||||
|
||||
return this.get<T[]>(pathname, params).then(items => {
|
||||
if (items.length < perPage) {
|
||||
return items;
|
||||
}
|
||||
|
||||
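// A full page was returned, so there may be more results: fetch the next page and concatenate.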
return this.getPaginated<T>(pathname, baseParams, currentPage + 1).then(moreItems => [...items, ...moreItems]);
|
||||
});
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
|
||||
if (params == null) {
|
||||
return pathname;
|
||||
}
|
||||
|
||||
const search = (params === null) ? '' : this.serializeSearchParams(params);
|
||||
const joiner = search && '?';
|
||||
|
||||
return `${pathname}${joiner}${search}`;
|
||||
}
|
||||
|
||||
protected request<T>(method: string, path: string, data: any = null): Promise<T> {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
const options = {
|
||||
headers: {...this.requestHeaders},
|
||||
host: GITHUB_HOSTNAME,
|
||||
method,
|
||||
path,
|
||||
};
|
||||
|
||||
const onError = (statusCode: number, responseText: string) => {
|
||||
const url = `https://${GITHUB_HOSTNAME}${path}`;
|
||||
reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
|
||||
};
|
||||
const onSuccess = (responseText: string) => {
|
||||
try { resolve(responseText && JSON.parse(responseText)); } catch (err) { reject(err); }
|
||||
};
|
||||
const onResponse = (res: IncomingMessage) => {
|
||||
const statusCode = res.statusCode || -1;
|
||||
const isSuccess = (200 <= statusCode) && (statusCode < 400);
|
||||
let responseText = '';
|
||||
|
||||
res.
|
||||
on('data', d => responseText += d).
|
||||
on('end', () => isSuccess ? onSuccess(responseText) : onError(statusCode, responseText)).
|
||||
on('error', reject);
|
||||
};
|
||||
|
||||
https.
|
||||
request(options, onResponse).
|
||||
on('error', reject).
|
||||
end(data && JSON.stringify(data));
|
||||
});
|
||||
}
|
||||
|
||||
protected serializeSearchParams(params: RequestParams): string {
|
||||
return Object.keys(params).
|
||||
filter(key => params[key] != null).
|
||||
map(key => `${key}=${encodeURIComponent(String(params[key]))}`).
|
||||
join('&');
|
||||
}
|
||||
}
|
@ -1,79 +0,0 @@
|
||||
import {GithubApi} from './github-api';
|
||||
import {assert, assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
export interface PullRequest {
|
||||
number: number;
|
||||
user: {login: string};
|
||||
labels: {name: string}[];
|
||||
}
|
||||
|
||||
export interface FileInfo {
|
||||
sha: string;
|
||||
filename: string;
|
||||
}
|
||||
|
||||
export type PullRequestState = 'all' | 'closed' | 'open';
|
||||
|
||||
/**
|
||||
* Access pull requests on GitHub.
|
||||
*/
|
||||
export class GithubPullRequests {
|
||||
public repoSlug: string;
|
||||
|
||||
/**
|
||||
* Create an instance of this helper
|
||||
* @param api An instance of the Github API helper.
|
||||
* @param githubOrg The organisation on GitHub whose repo we will interrogate.
|
||||
* @param githubRepo The repository on Github with whose PRs we will interact.
|
||||
*/
|
||||
constructor(private api: GithubApi, githubOrg: string, githubRepo: string) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
this.repoSlug = `${githubOrg}/${githubRepo}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a comment on a PR.
|
||||
* @param pr The number of the PR on which to comment.
|
||||
* @param body The body of the comment to post.
|
||||
* @returns A promise that resolves when the comment has been posted.
|
||||
*/
|
||||
public addComment(pr: number, body: string): Promise<any> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
assert(!!body, `Invalid or empty comment body: ${body}`);
|
||||
return this.api.post<any>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about a PR.
|
||||
* @param pr The number of the PR for which to request info.
|
||||
* @returns A promise that resolves with information about the specified PR.
|
||||
*/
|
||||
public fetch(pr: number): Promise<PullRequest> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
// Using the `/issues/` URL, because the `/pulls/` one does not provide labels.
|
||||
return this.api.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about all PRs that match the given state.
|
||||
* @param state Only retrieve PRs that have this state.
|
||||
* @returns A promise that is resolved with information about the requested PRs.
|
||||
*/
|
||||
public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
|
||||
const pathname = `/repos/${this.repoSlug}/pulls`;
|
||||
const params = {state};
|
||||
|
||||
return this.api.getPaginated<PullRequest>(pathname, params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request a list of files for the given PR.
|
||||
* @param pr The number of the PR for which to request files.
|
||||
* @returns A promise that resolves to an array of file information
|
||||
*/
|
||||
public fetchFiles(pr: number): Promise<FileInfo[]> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
return this.api.getPaginated<FileInfo>(`/repos/${this.repoSlug}/pulls/${pr}/files`);
|
||||
}
|
||||
}
|
@ -1,72 +0,0 @@
|
||||
import {GithubApi} from './github-api';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
export interface Team {
|
||||
id: number;
|
||||
slug: string;
|
||||
}
|
||||
|
||||
export interface TeamMembership {
|
||||
state: string;
|
||||
}
|
||||
|
||||
export class GithubTeams {
|
||||
/**
|
||||
* Create an instance of this helper
|
||||
* @param api An instance of the Github API helper.
|
||||
* @param githubOrg The organisation on GitHub whose repo we will interrogate.
|
||||
*/
|
||||
constructor(private api: GithubApi, protected githubOrg: string) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about all the organisation's teams in GitHub.
|
||||
* @returns A promise that is resolved with information about the teams.
|
||||
*/
|
||||
public fetchAll(): Promise<Team[]> {
|
||||
return this.api.getPaginated<Team>(`/orgs/${this.githubOrg}/teams`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the specified username is a member of the specified team.
|
||||
* @param username The username to check for in the team.
* @param teamIds The ids of the teams to check for the username.
|
||||
* @returns a Promise that resolves to `true` if the username is a member of the team.
|
||||
*/
|
||||
public async isMemberById(username: string, teamIds: number[]): Promise<boolean> {
|
||||
|
||||
const getMembership = async (teamId: number) => {
|
||||
try {
|
||||
const {state} = await this.api.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`);
|
||||
return state === 'active';
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
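// Check the teams one at a time, short-circuiting as soon as an active membership is found.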
for (const teamId of teamIds) {
|
||||
if (await getMembership(teamId)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the given username is a member of the teams specified by the team slugs.
|
||||
* @param username The username to check for in the teams.
|
||||
* @param teamSlugs A collection of slugs that represent the teams to check for the username.
* @returns a Promise that resolves to `true` if the username is a member of at least one of the specified teams.
|
||||
*/
|
||||
public async isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
|
||||
try {
|
||||
const teams = await this.fetchAll();
|
||||
const teamIds = teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id);
|
||||
return await this.isMemberById(username, teamIds);
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
// We can't use `import...from` here, because of the following mess:
|
||||
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
|
||||
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
|
||||
//
|
||||
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
// `jasmine-core` module, not the `jasmine` module).
|
||||
import Jasmine = require('jasmine');
|
||||
import 'source-map-support/register';
|
||||
|
||||
export const runTests = (specFiles: string[]) => {
|
||||
const config = {
|
||||
random: true,
|
||||
spec_files: specFiles,
|
||||
stopSpecOnExpectationFailure: true,
|
||||
};
|
||||
|
||||
process.on('unhandledRejection', (reason: any) => console.log('Unhandled rejection:', reason));
|
||||
|
||||
const runner = new Jasmine({});
|
||||
runner.loadConfig(config);
|
||||
runner.onComplete((passed: boolean) => process.exit(passed ? 0 : 1));
|
||||
runner.execute();
|
||||
};
|
@ -1,98 +0,0 @@
|
||||
import {basename, resolve as resolvePath} from 'path';
|
||||
import {SHORT_SHA_LEN} from './constants';
|
||||
|
||||
/**
|
||||
* Shorten a SHA to make it more readable
|
||||
* @param sha The SHA to shorten.
|
||||
*/
|
||||
export function computeShortSha(sha: string) {
|
||||
return sha.substr(0, SHORT_SHA_LEN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the path for a downloaded artifact file.
|
||||
* @param downloadsDir The directory where artifacts are downloaded
|
||||
* @param pr The PR associated with this artifact.
|
||||
* @param sha The SHA associated with the build for this artifact.
|
||||
* @param artifactPath The path to the artifact on CircleCI.
|
||||
* @returns The fully resolved location for the specified downloaded artifact.
|
||||
*/
|
||||
export function computeArtifactDownloadPath(downloadsDir: string, pr: number, sha: string, artifactPath: string) {
|
||||
return resolvePath(downloadsDir, `${pr}-${computeShortSha(sha)}-${basename(artifactPath)}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the PR number and latest commit SHA from a downloaded file path.
|
||||
* @param downloadPath the path to the downloaded file.
|
||||
* @returns An object whose keys are the PR and SHA extracted from the file path.
|
||||
*/
|
||||
export function getPrInfoFromDownloadPath(downloadPath: string) {
|
||||
const file = basename(downloadPath);
|
||||
const [pr, sha] = file.split('-');
|
||||
return {pr: +pr, sha};
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a value is true.
|
||||
* @param value The value to assert.
|
||||
* @param message The message if the value is not true.
|
||||
*/
|
||||
export function assert(value: boolean, message: string) {
|
||||
if (!value) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a parameter is not missing or empty.
|
||||
* @param name The name of the parameter.
|
||||
* @param value The value of the parameter.
|
||||
*/
|
||||
export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
|
||||
assert(!!value, `Missing or empty required parameter '${name}'!`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Get an environment variable.
|
||||
* @param name The name of the environment variable.
|
||||
* @param isOptional True if the variable is optional.
|
||||
* @returns The value of the variable or "" if it is optional and falsy.
|
||||
* @throws `Error` if the variable is falsy and not optional.
|
||||
*/
|
||||
export const getEnvVar = (name: string, isOptional = false): string => {
|
||||
const value = process.env[name];
|
||||
|
||||
if (!isOptional && !value) {
|
||||
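// Throw and immediately catch, so that the logged error includes a stack trace pointing here.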
try {
|
||||
throw new Error(`ERROR: Missing required environment variable '${name}'!`);
|
||||
} catch (error) {
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
return value || '';
|
||||
};
|
||||
|
||||
/**
|
||||
* A basic logger implementation.
|
||||
* Delegates to `console`, but prepends each message with the current date and the specified scope (i.e. the caller).
|
||||
*/
|
||||
export class Logger {
|
||||
private padding = ' '.repeat(20 - this.scope.length);
|
||||
|
||||
/**
|
||||
* Create a new `Logger` instance for the specified `scope`.
|
||||
* @param scope The logger's scope (added to all messages).
|
||||
*/
|
||||
constructor(private scope: string) {}
|
||||
|
||||
public error(...args: any[]) { this.callMethod('error', args); }
|
||||
public info(...args: any[]) { this.callMethod('info', args); }
|
||||
public log(...args: any[]) { this.callMethod('log', args); }
|
||||
public warn(...args: any[]) { this.callMethod('warn', args); }
|
||||
|
||||
private callMethod(method: 'error' | 'info' | 'log' | 'warn', args: any[]) {
|
||||
console[method](`[${new Date()}]`, `${this.scope}:${this.padding}`, ...args);
|
||||
}
|
||||
}
|
@ -1,144 +0,0 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import {EventEmitter} from 'events';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
// Classes
|
||||
export class BuildCreator extends EventEmitter {
|
||||
|
||||
private logger = new Logger('BuildCreator');
|
||||
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string) {
|
||||
super();
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public create(pr: number, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
|
||||
// Use only part of the SHA for more readable URLs.
|
||||
sha = computeShortSha(sha);
|
||||
|
||||
const {newPrDir: prDir} = this.getCandidatePrDirs(pr, isPublic);
|
||||
const shaDir = path.join(prDir, sha);
|
||||
let dirToRemoveOnError: string;
|
||||
|
||||
return Promise.resolve().
|
||||
// If the same PR exists with different visibility, update the visibility first.
|
||||
then(() => this.updatePrVisibility(pr, isPublic)).
|
||||
then(() => Promise.all([this.exists(prDir), this.exists(shaDir)])).
|
||||
then(([prDirExisted, shaDirExisted]) => {
|
||||
if (shaDirExisted) {
|
||||
const publicOrNot = isPublic ? 'public' : 'non-public';
|
||||
throw new PreviewServerError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
}
|
||||
|
||||
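// On error, remove only what this call created: the SHA dir if the PR dir already existed, otherwise the whole PR dir.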
dirToRemoveOnError = prDirExisted ? shaDir : prDir;
|
||||
|
||||
return Promise.resolve().
|
||||
then(() => shell.mkdir('-p', shaDir)).
|
||||
then(() => this.extractArchive(archivePath, shaDir)).
|
||||
then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha, isPublic))).
|
||||
then(() => undefined);
|
||||
}).
|
||||
catch(err => {
|
||||
if (dirToRemoveOnError) {
|
||||
shell.rm('-rf', dirToRemoveOnError);
|
||||
}
|
||||
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while creating preview at: ${shaDir}\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
public updatePrVisibility(pr: number, makePublic: boolean): Promise<boolean> {
|
||||
const {oldPrDir: otherVisPrDir, newPrDir: targetVisPrDir} = this.getCandidatePrDirs(pr, makePublic);
|
||||
|
||||
return Promise.
|
||||
all([this.exists(otherVisPrDir), this.exists(targetVisPrDir)]).
|
||||
then(([otherVisPrDirExisted, targetVisPrDirExisted]) => {
|
||||
if (!otherVisPrDirExisted) {
|
||||
// No visibility change: Either the visibility is up-to-date or the PR does not exist.
|
||||
return false;
|
||||
} else if (targetVisPrDirExisted) {
|
||||
// Error: Directories for both visibilities exist.
|
||||
throw new PreviewServerError(409,
|
||||
`Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
|
||||
}
|
||||
|
||||
// Visibility change: Moving `otherVisPrDir` to `targetVisPrDir`.
|
||||
return Promise.resolve().
|
||||
then(() => shell.mv(otherVisPrDir, targetVisPrDir)).
|
||||
then(() => this.listShasByDate(targetVisPrDir)).
|
||||
then(shas => this.emit(ChangedPrVisibilityEvent.type, new ChangedPrVisibilityEvent(+pr, shas, makePublic))).
|
||||
then(() => true);
|
||||
}).
|
||||
catch(err => {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected exists(fileOrDir: string): Promise<boolean> {
|
||||
return new Promise(resolve => fs.access(fileOrDir, err => resolve(!err)));
|
||||
}
|
||||
|
||||
protected extractArchive(inputFile: string, outputDir: string): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const cmd = `tar --extract --gzip --directory "${outputDir}" --file "${inputFile}"`;
|
||||
|
||||
cp.exec(cmd, (err, _stdout, stderr) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
if (stderr) {
|
||||
this.logger.warn(stderr);
|
||||
}
|
||||
|
||||
try {
|
||||
shell.chmod('-R', 'a-w', outputDir);
|
||||
shell.rm('-f', inputFile);
|
||||
resolve();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
protected getCandidatePrDirs(pr: number, isPublic: boolean): {oldPrDir: string, newPrDir: string} {
|
||||
const hiddenPrDir = path.join(this.buildsDir, HIDDEN_DIR_PREFIX + pr);
|
||||
const publicPrDir = path.join(this.buildsDir, `${pr}`);
|
||||
|
||||
const oldPrDir = isPublic ? hiddenPrDir : publicPrDir;
|
||||
const newPrDir = isPublic ? publicPrDir : hiddenPrDir;
|
||||
|
||||
return {oldPrDir, newPrDir};
|
||||
}
|
||||
|
||||
protected listShasByDate(inputDir: string): Promise<string[]> {
|
||||
return Promise.resolve().
|
||||
then(() => shell.ls('-l', inputDir) as any as Promise<(fs.Stats & {name: string})[]>).
|
||||
// Keep directories only.
|
||||
// (Also, convert to standard Array - ShellJS provides custom `sort()` method for sorting file contents.)
|
||||
then(items => items.filter(item => item.isDirectory())).
|
||||
// Sort by modification date.
|
||||
then(items => items.sort((a, b) => a.mtime.getTime() - b.mtime.getTime())).
|
||||
// Return directory names.
|
||||
then(items => items.map(item => item.name));
|
||||
}
|
||||
}
|
@ -1,16 +0,0 @@
|
||||
// Classes
|
||||
export class ChangedPrVisibilityEvent {
|
||||
// Properties - Public, Static
|
||||
public static type = 'pr.changedVisibility';
|
||||
|
||||
// Constructor
|
||||
constructor(public pr: number, public shas: string[], public isPublic: boolean) {}
|
||||
}
|
||||
|
||||
export class CreatedBuildEvent {
|
||||
// Properties - Public, Static
|
||||
public static type = 'build.created';
|
||||
|
||||
// Constructor
|
||||
constructor(public pr: number, public sha: string, public isPublic: boolean) {}
|
||||
}
|
@ -1,83 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import fetch from 'node-fetch';
|
||||
import {dirname} from 'path';
|
||||
import {mkdir} from 'shelljs';
|
||||
import {promisify} from 'util';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {assert, assertNotMissingOrEmpty, computeArtifactDownloadPath, Logger} from '../common/utils';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
export interface GithubInfo {
|
||||
org: string;
|
||||
pr: number;
|
||||
repo: string;
|
||||
sha: string;
|
||||
success: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper that can get information about builds and download build artifacts.
|
||||
*/
|
||||
export class BuildRetriever {
|
||||
private logger = new Logger('BuildRetriever');
|
||||
constructor(private api: CircleCiApi, private downloadSizeLimit: number, private downloadDir: string) {
|
||||
assert(downloadSizeLimit > 0, 'Invalid parameter "downloadSizeLimit" should be a number greater than 0.');
|
||||
assertNotMissingOrEmpty('downloadDir', downloadDir);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get GitHub information about a build
|
||||
* @param buildNum The number of the build for which to retrieve the info.
|
||||
* @returns The Github org, repo, PR and latest SHA for the specified build.
|
||||
*/
|
||||
public async getGithubInfo(buildNum: number): Promise<GithubInfo> {
|
||||
const buildInfo = await this.api.getBuildInfo(buildNum);
|
||||
const githubInfo: GithubInfo = {
|
||||
org: buildInfo.username,
|
||||
pr: getPrFromBranch(buildInfo.branch),
|
||||
repo: buildInfo.reponame,
|
||||
sha: buildInfo.vcs_revision,
|
||||
success: !buildInfo.failed,
|
||||
};
|
||||
return githubInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a request to the given URL for a build artifact and store it locally.
|
||||
* @param buildNum the number of the CircleCI build whose artifact we want to download.
|
||||
* @param pr the number of the PR that triggered the CircleCI build.
|
||||
* @param sha the commit in the PR that triggered the CircleCI build.
|
||||
* @param artifactPath the path on CircleCI where the artifact was stored.
|
||||
* @returns A promise to the file path where the downloaded file was stored.
|
||||
*/
|
||||
public async downloadBuildArtifact(buildNum: number, pr: number, sha: string, artifactPath: string): Promise<string> {
|
||||
try {
|
||||
const outPath = computeArtifactDownloadPath(this.downloadDir, pr, sha, artifactPath);
|
||||
const downloadExists = await new Promise(resolve => fs.exists(outPath, exists => resolve(exists)));
|
||||
if (!downloadExists) {
|
||||
const url = await this.api.getBuildArtifactUrl(buildNum, artifactPath);
|
||||
const response = await fetch(url, {size: this.downloadSizeLimit});
|
||||
if (response.status !== 200) {
|
||||
throw new PreviewServerError(response.status, `Error ${response.status} - ${response.statusText}`);
|
||||
}
|
||||
const buffer = await response.buffer();
|
||||
mkdir('-p', dirname(outPath));
|
||||
await promisify(fs.writeFile)(outPath, buffer);
|
||||
}
|
||||
return outPath;
|
||||
} catch (error) {
|
||||
this.logger.warn(error);
|
||||
const status = (error.type === 'max-size') ? 413 : 500;
|
||||
throw new PreviewServerError(status, `CircleCI artifact download failed (${error.message || error})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getPrFromBranch(branch: string): number {
|
||||
// CircleCI only exposes PR numbers via the `branch` field :-(
|
||||
const match = /^pull\/(\d+)$/.exec(branch);
|
||||
if (!match) {
|
||||
throw new Error(`No PR found in branch field: ${branch}`);
|
||||
}
|
||||
return +match[1];
|
||||
}
|
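The retriever above is typically driven by a build number taken from the CircleCI webhook: `getGithubInfo()` first resolves the PR, SHA and build status, and `downloadBuildArtifact()` then fetches and caches the archive. Below is a minimal usage sketch; the token, size limit, download directory and artifact path are illustrative placeholders, and the import paths assume the directory layout shown in this diff.

```ts
import {CircleCiApi} from '../common/circle-ci-api';
import {BuildRetriever} from './build-retriever';

// Sketch only: all concrete values below are placeholders.
async function fetchPreviewArtifact(buildNum: number): Promise<string | null> {
  const api = new CircleCiApi('some-org', 'some-repo', '<circle-ci-token>');
  const retriever = new BuildRetriever(api, 100 * 1024 * 1024, '/tmp/downloads');

  const {pr, sha, success} = await retriever.getGithubInfo(buildNum);
  if (!success) {
    return null;  // The CircleCI build failed, so there is no artifact worth downloading.
  }

  // Resolves to the local path of the downloaded (or previously cached) artifact.
  return retriever.downloadBuildArtifact(buildNum, pr, sha, 'aio/dist/aio-snapshot.tgz');
}
```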
@ -1,46 +0,0 @@
|
||||
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
|
||||
/**
|
||||
* A helper to verify whether builds are trusted.
|
||||
*/
|
||||
export class BuildVerifier {
|
||||
/**
|
||||
* Construct a new BuildVerifier instance.
|
||||
* @param prs A helper to access PR information.
|
||||
* @param teams A helper to access Github team information.
|
||||
* @param allowedTeamSlugs The teams that are trusted.
|
||||
* @param trustedPrLabel The github label that indicates that a PR is trusted.
|
||||
*/
|
||||
constructor(protected prs: GithubPullRequests, protected teams: GithubTeams,
|
||||
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
|
||||
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
|
||||
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR contains files that are significant to the build.
|
||||
* @param pr The number of the PR to check
|
||||
* @param significantFilePattern A regex that selects files that are significant.
|
||||
*/
|
||||
public async getSignificantFilesChanged(pr: number, significantFilePattern: RegExp): Promise<boolean> {
|
||||
const files = await this.prs.fetchFiles(pr);
|
||||
return files.some(file => significantFilePattern.test(file.filename));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR is trusted.
|
||||
* @param pr The number of the PR to check.
|
||||
* @returns true if the PR is trusted.
|
||||
*/
|
||||
public async getPrIsTrusted(pr: number): Promise<boolean> {
|
||||
const prInfo = await this.prs.fetch(pr);
|
||||
return this.hasLabel(prInfo, this.trustedPrLabel) ||
|
||||
(await this.teams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
|
||||
}
|
||||
|
||||
protected hasLabel(prInfo: PullRequest, label: string): boolean {
|
||||
return prInfo.labels.some(labelObj => labelObj.name === label);
|
||||
}
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {
|
||||
AIO_ARTIFACT_MAX_SIZE,
|
||||
AIO_ARTIFACT_PATH,
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_CIRCLE_CI_TOKEN,
|
||||
AIO_DOMAIN_NAME,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TEAM_SLUGS,
|
||||
AIO_GITHUB_TOKEN,
|
||||
AIO_PREVIEW_SERVER_HOSTNAME,
|
||||
AIO_PREVIEW_SERVER_PORT,
|
||||
AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
AIO_TRUSTED_PR_LABEL,
|
||||
} from '../common/env-variables';
|
||||
import {PreviewServerFactory} from './preview-server-factory';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main(): void {
|
||||
PreviewServerFactory
|
||||
.create({
|
||||
buildArtifactPath: AIO_ARTIFACT_PATH,
|
||||
buildsDir: AIO_BUILDS_DIR,
|
||||
circleCiToken: AIO_CIRCLE_CI_TOKEN,
|
||||
domainName: AIO_DOMAIN_NAME,
|
||||
downloadSizeLimit: AIO_ARTIFACT_MAX_SIZE,
|
||||
downloadsDir: AIO_DOWNLOADS_DIR,
|
||||
githubOrg: AIO_GITHUB_ORGANIZATION,
|
||||
githubRepo: AIO_GITHUB_REPO,
|
||||
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
|
||||
githubToken: AIO_GITHUB_TOKEN,
|
||||
significantFilesPattern: AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
|
||||
})
|
||||
.listen(AIO_PREVIEW_SERVER_PORT, AIO_PREVIEW_SERVER_HOSTNAME);
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
// Classes
|
||||
export class PreviewServerError extends Error {
|
||||
// Constructor
|
||||
constructor(public status: number = 500, message?: string) {
|
||||
super(message);
|
||||
Object.setPrototypeOf(this, PreviewServerError.prototype);
|
||||
}
|
||||
}
|
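The explicit `Object.setPrototypeOf(...)` call above works around a well-known TypeScript/ES5 caveat: when a class extends a built-in such as `Error` and the output targets ES5, the instance's prototype chain is not restored automatically, so `instanceof` checks (like the ones in `respondWithError()` further down in this diff) would silently fail. A standalone sketch of the pattern:

```ts
// Standalone sketch, not part of the server code.
class StatusError extends Error {
  constructor(public status: number = 500, message?: string) {
    super(message);
    // Restore the prototype chain so `err instanceof StatusError` holds even on ES5 output.
    Object.setPrototypeOf(this, StatusError.prototype);
  }
}

const err = new StatusError(404, 'not found');
console.log(err instanceof StatusError);  // true
console.log(err.status);                  // 404
```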
@ -1,213 +0,0 @@
|
||||
// Imports
|
||||
import * as bodyParser from 'body-parser';
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {AddressInfo} from 'net';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {GithubApi} from '../common/github-api';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assert, assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {BuildCreator} from './build-creator';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {BuildRetriever} from './build-retriever';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
import {respondWithError, throwRequestError} from './utils';
|
||||
|
||||
const AIO_PREVIEW_JOB = 'aio_preview';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface PreviewServerConfig {
|
||||
downloadsDir: string;
|
||||
downloadSizeLimit: number;
|
||||
buildArtifactPath: string;
|
||||
buildsDir: string;
|
||||
domainName: string;
|
||||
githubOrg: string;
|
||||
githubRepo: string;
|
||||
githubTeamSlugs: string[];
|
||||
circleCiToken: string;
|
||||
githubToken: string;
|
||||
significantFilesPattern: string;
|
||||
trustedPrLabel: string;
|
||||
}
|
||||
|
||||
const logger = new Logger('PreviewServer');
|
||||
|
||||
// Classes
|
||||
export class PreviewServerFactory {
|
||||
// Methods - Public
|
||||
public static create(cfg: PreviewServerConfig): http.Server {
|
||||
assertNotMissingOrEmpty('domainName', cfg.domainName);
|
||||
|
||||
const circleCiApi = new CircleCiApi(cfg.githubOrg, cfg.githubRepo, cfg.circleCiToken);
|
||||
const githubApi = new GithubApi(cfg.githubToken);
|
||||
const prs = new GithubPullRequests(githubApi, cfg.githubOrg, cfg.githubRepo);
|
||||
const teams = new GithubTeams(githubApi, cfg.githubOrg);
|
||||
|
||||
const buildRetriever = new BuildRetriever(circleCiApi, cfg.downloadSizeLimit, cfg.downloadsDir);
|
||||
const buildVerifier = new BuildVerifier(prs, teams, cfg.githubTeamSlugs, cfg.trustedPrLabel);
|
||||
const buildCreator = PreviewServerFactory.createBuildCreator(prs, cfg.buildsDir, cfg.domainName);
|
||||
|
||||
const middleware = PreviewServerFactory.createMiddleware(buildRetriever, buildVerifier, buildCreator, cfg);
|
||||
const httpServer = http.createServer(middleware as any);
|
||||
|
||||
httpServer.on('listening', () => {
|
||||
const info = httpServer.address() as AddressInfo;
|
||||
logger.info(`Up and running (and listening on ${info.address}:${info.port})...`);
|
||||
});
|
||||
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
public static createMiddleware(buildRetriever: BuildRetriever, buildVerifier: BuildVerifier,
|
||||
buildCreator: BuildCreator, cfg: PreviewServerConfig): express.Express {
|
||||
const middleware = express();
|
||||
const jsonParser = bodyParser.json();
|
||||
const significantFilesRe = new RegExp(cfg.significantFilesPattern);
|
||||
|
||||
// RESPOND TO IS-ALIVE PING
|
||||
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
|
||||
|
||||
// RESPOND TO CAN-HAVE-PUBLIC-PREVIEW CHECK
|
||||
const canHavePublicPreviewRe = /^\/can-have-public-preview\/(\d+)\/?$/;
|
||||
middleware.get(canHavePublicPreviewRe, async (req, res) => {
|
||||
try {
|
||||
const pr = +canHavePublicPreviewRe.exec(req.url)![1];
|
||||
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
// Cannot have preview: PR did not touch relevant files: `aio/` or `packages/` (except for spec files).
|
||||
res.send({canHavePublicPreview: false, reason: 'No significant files touched.'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because it did not touch any significant files.`);
|
||||
} else if (!await buildVerifier.getPrIsTrusted(pr)) {
|
||||
// Cannot have preview: PR not automatically verifiable as "trusted".
|
||||
res.send({canHavePublicPreview: false, reason: 'Not automatically verifiable as "trusted".'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because not automatically verifiable as "trusted".`);
|
||||
} else {
|
||||
// Can have preview.
|
||||
res.send({canHavePublicPreview: true, reason: null});
|
||||
logger.log(`PR:${pr} - Can have a public preview.`);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Previewability check error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// CIRCLE_CI BUILD COMPLETE WEBHOOK
|
||||
middleware.post(/^\/circle-build\/?$/, jsonParser, async (req, res) => {
|
||||
try {
|
||||
if (!(
|
||||
req.is('json') &&
|
||||
req.body &&
|
||||
req.body.payload &&
|
||||
req.body.payload.build_num > 0 &&
|
||||
req.body.payload.build_parameters &&
|
||||
req.body.payload.build_parameters.CIRCLE_JOB
|
||||
)) {
|
||||
throwRequestError(400, `Incorrect body content. Expected JSON`, req);
|
||||
}
|
||||
|
||||
const job = req.body.payload.build_parameters.CIRCLE_JOB;
|
||||
const buildNum = req.body.payload.build_num;
|
||||
|
||||
logger.log(`Build:${buildNum}, Job:${job} - processing web-hook trigger`);
|
||||
|
||||
if (job !== AIO_PREVIEW_JOB) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`Build:${buildNum}, Job:${job} -`,
|
||||
`Skipping preview processing because this is not the "${AIO_PREVIEW_JOB}" job.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const { pr, sha, org, repo, success } = await buildRetriever.getGithubInfo(buildNum);
|
||||
|
||||
if (!success) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - Skipping preview processing because this build did not succeed.`);
|
||||
return;
|
||||
}
|
||||
|
||||
assert(cfg.githubOrg === org,
|
||||
`Invalid webhook: expected "githubOrg" property to equal "${cfg.githubOrg}" but got "${org}".`);
|
||||
assert(cfg.githubRepo === repo,
|
||||
`Invalid webhook: expected "githubRepo" property to equal "${cfg.githubRepo}" but got "${repo}".`);
|
||||
|
||||
// Do not deploy unless this PR has touched relevant files: `aio/` or `packages/` (except for spec files)
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - ` +
|
||||
`Skipping preview processing because this PR did not touch any significant files.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const artifactPath = await buildRetriever.downloadBuildArtifact(buildNum, pr, sha, cfg.buildArtifactPath);
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(pr);
|
||||
await buildCreator.create(pr, sha, artifactPath, isPublic);
|
||||
|
||||
res.sendStatus(isPublic ? 201 : 202);
|
||||
logger.log(`PR:${pr}, SHA:${computeShortSha(sha)}, Build:${buildNum} - ` +
|
||||
`Successfully created ${isPublic ? 'public' : 'non-public'} preview.`);
|
||||
} catch (err) {
|
||||
logger.error('CircleCI webhook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// GITHUB PR UPDATED WEBHOOK
|
||||
middleware.post(/^\/pr-updated\/?$/, jsonParser, async (req, res) => {
|
||||
const { action, number: prNo }: { action?: string, number?: number } = req.body;
|
||||
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
|
||||
|
||||
try {
|
||||
if (!visMayHaveChanged) {
|
||||
res.sendStatus(200);
|
||||
} else if (!prNo) {
|
||||
throwRequestError(400, `Missing or empty 'number' field`, req);
|
||||
} else {
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(prNo);
|
||||
await buildCreator.updatePrVisibility(prNo, isPublic);
|
||||
res.sendStatus(200);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('PR update hook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// ALL OTHER REQUESTS
|
||||
middleware.all('*', req => throwRequestError(404, 'Unknown resource', req));
|
||||
middleware.use((err: any, _req: any, res: express.Response, _next: any) => {
|
||||
const statusText = http.STATUS_CODES[err.status] || '???';
|
||||
logger.error(`Preview server error: ${err.status} - ${statusText}:`, err.message);
|
||||
respondWithError(res, err);
|
||||
});
|
||||
|
||||
return middleware;
|
||||
}
|
||||
|
||||
public static createBuildCreator(prs: GithubPullRequests, buildsDir: string, domainName: string): BuildCreator {
|
||||
const buildCreator = new BuildCreator(buildsDir);
|
||||
const postPreviewsComment = (pr: number, shas: string[]) => {
|
||||
const body = shas.
|
||||
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
|
||||
join('\n');
|
||||
|
||||
return prs.addComment(pr, body);
|
||||
};
|
||||
|
||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
|
||||
if (isPublic) {
|
||||
postPreviewsComment(pr, [sha]);
|
||||
}
|
||||
});
|
||||
|
||||
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
|
||||
if (isPublic && shas.length) {
|
||||
postPreviewsComment(pr, shas);
|
||||
}
|
||||
});
|
||||
|
||||
return buildCreator;
|
||||
}
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
import * as express from 'express';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
/**
|
||||
* Update the response to report that an error has occurred.
|
||||
* @param res The response to configure as an error.
|
||||
* @param err The error that needs to be reported.
|
||||
*/
|
||||
export async function respondWithError(res: express.Response, err: any): Promise<void> {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, String((err && err.message) || err));
|
||||
}
|
||||
|
||||
res.status(err.status);
|
||||
return new Promise(resolve => res.end(err.message, resolve));
|
||||
}
|
||||
|
||||
/**
|
||||
* Throw an exception that describes the given error information.
|
||||
* @param status The HTTP status code to include in the error.
|
||||
* @param error The error message to include in the error.
|
||||
* @param req The request that triggered this error.
|
||||
*/
|
||||
export function throwRequestError(status: number, error: string, req: express.Request): never {
|
||||
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
|
||||
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
|
||||
throw new PreviewServerError(status, message);
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
export const enum BuildNums {
|
||||
BUILD_INFO_ERROR = 1,
|
||||
BUILD_INFO_404,
|
||||
BUILD_INFO_BUILD_FAILED,
|
||||
BUILD_INFO_INVALID_GH_ORG,
|
||||
BUILD_INFO_INVALID_GH_REPO,
|
||||
CHANGED_FILES_ERROR,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
BUILD_ARTIFACTS_ERROR,
|
||||
BUILD_ARTIFACTS_404,
|
||||
BUILD_ARTIFACTS_EMPTY,
|
||||
BUILD_ARTIFACTS_MISSING,
|
||||
DOWNLOAD_ARTIFACT_ERROR,
|
||||
DOWNLOAD_ARTIFACT_404,
|
||||
DOWNLOAD_ARTIFACT_TOO_BIG,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
|
||||
export const enum PrNums {
|
||||
CHANGED_FILES_ERROR = 1,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
|
||||
export const SHA = '1234567890'.repeat(4);
|
||||
export const ALT_SHA = 'abcde'.repeat(8);
|
||||
export const SIMILAR_SHA = SHA.slice(0, -1) + 'A';
|
@ -1,10 +0,0 @@
|
||||
declare module 'delete-empty' {
|
||||
interface Options {
|
||||
dryRun: boolean;
|
||||
verbose: boolean;
|
||||
filter: (filePath: string) => boolean;
|
||||
}
|
||||
export default function deleteEmpty(cwd: string, options?: Options): Promise<string[]>;
|
||||
export default function deleteEmpty(cwd: string, options?: Options, callback?: (err: any, deleted: string[]) => void): void;
|
||||
export function sync(cwd: string, options?: Options): string[];
|
||||
}
|
@ -1,237 +0,0 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {AIO_DOWNLOADS_DIR, HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_NGINX_PORT_HTTP,
|
||||
AIO_NGINX_PORT_HTTPS,
|
||||
AIO_WWW_USER,
|
||||
} from '../common/env-variables';
|
||||
import {computeShortSha, Logger} from '../common/utils';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface CmdResult { success: boolean; err: Error | null; stdout: string; stderr: string; }
|
||||
export interface FileSpecs { content?: string; size?: number; }
|
||||
|
||||
export type CleanUpFn = () => void;
|
||||
export type TestSuiteFactory = (scheme: string, port: number) => void;
|
||||
export type VerifyCmdResultFn = (result: CmdResult) => void;
|
||||
|
||||
// Classes
|
||||
class Helper {
|
||||
|
||||
// Properties - Protected
|
||||
protected cleanUpFns: CleanUpFn[] = [];
|
||||
protected portPerScheme: {[scheme: string]: number} = {
|
||||
http: AIO_NGINX_PORT_HTTP,
|
||||
https: AIO_NGINX_PORT_HTTPS,
|
||||
};
|
||||
|
||||
private logger = new Logger('TestHelper');
|
||||
|
||||
// Constructor
|
||||
constructor() {
|
||||
shell.mkdir('-p', AIO_BUILDS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_BUILDS_DIR}`);
|
||||
shell.mkdir('-p', AIO_DOWNLOADS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_DOWNLOADS_DIR}`);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public cleanUp(): void {
|
||||
while (this.cleanUpFns.length) {
|
||||
// Clean-up fns remove themselves from the list.
|
||||
this.cleanUpFns[0]();
|
||||
}
|
||||
|
||||
const leftoverDownloads = fs.readdirSync(AIO_DOWNLOADS_DIR);
|
||||
const leftoverBuilds = fs.readdirSync(AIO_BUILDS_DIR);
|
||||
|
||||
if (leftoverDownloads.length) {
|
||||
this.logger.log(`Downloads directory '${AIO_DOWNLOADS_DIR}' is not empty after clean-up.`, leftoverDownloads);
|
||||
shell.rm('-rf', `${AIO_DOWNLOADS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length) {
|
||||
this.logger.log(`Builds directory '${AIO_BUILDS_DIR}' is not empty after clean-up.`, leftoverBuilds);
|
||||
shell.rm('-rf', `${AIO_BUILDS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length || leftoverDownloads.length) {
|
||||
throw new Error(`Unexpected test files not cleaned up.`);
|
||||
}
|
||||
}
|
||||
|
||||
public createDummyBuild(pr: number, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
|
||||
const prDir = this.getPrDir(pr, isPublic);
|
||||
const shaDir = this.getShaDir(prDir, sha, legacy);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
||||
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${prDir}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
||||
}
|
||||
|
||||
public getPrDir(pr: number, isPublic: boolean): string {
|
||||
const prDirName = isPublic ? '' + pr : HIDDEN_DIR_PREFIX + pr;
|
||||
return path.join(AIO_BUILDS_DIR, prDirName);
|
||||
}
|
||||
|
||||
public getShaDir(prDir: string, sha: string, legacy = false): string {
|
||||
return path.join(prDir, legacy ? sha : computeShortSha(sha));
|
||||
}
|
||||
|
||||
public readBuildFile(pr: number, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
return fs.readFileSync(absFilePath, 'utf8');
|
||||
}
|
||||
|
||||
public runCmd(cmd: string, opts: cp.ExecOptions = {}): Promise<CmdResult> {
|
||||
return new Promise(resolve => {
|
||||
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
|
||||
this.createCleanUpFn(() => proc.kill());
|
||||
});
|
||||
}
|
||||
|
||||
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory): void {
|
||||
Object.entries(this.portPerScheme).forEach(([scheme, port]) => suiteFactory(scheme, port));
|
||||
}
|
||||
|
||||
public verifyResponse(status: number, regex: string | RegExp = /^/): VerifyCmdResultFn {
|
||||
return (result: CmdResult) => {
|
||||
const [headers, body] = result.stdout.
|
||||
split(/(?:\r?\n){2,}/).
|
||||
map(s => s.trim()).
|
||||
slice(-2); // In case of redirect, discard the previous headers.
|
||||
// Only keep the last two sections (final headers and body).
|
||||
|
||||
if (!result.success) {
|
||||
this.logger.log('Stdout:', result.stdout);
|
||||
this.logger.error('Stderr:', result.stderr);
|
||||
this.logger.error('Error:', result.err);
|
||||
}
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(headers).toMatch(new RegExp(`HTTP/(?:1\\.1|2) ${status} `));
|
||||
expect(body).toMatch(regex);
|
||||
};
|
||||
}
|
||||
|
||||
public writeBuildFile(pr: number, sha: string, relFilePath: string, content: string, isPublic = true,
|
||||
legacy = false): void {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
this.writeFile(absFilePath, {content}, true);
|
||||
}
|
||||
|
||||
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): void {
|
||||
if (!force && fs.existsSync(filePath)) {
|
||||
throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
|
||||
}
|
||||
|
||||
let cleanUpTarget = filePath;
|
||||
while (!fs.existsSync(path.dirname(cleanUpTarget))) {
|
||||
cleanUpTarget = path.dirname(cleanUpTarget);
|
||||
}
|
||||
|
||||
shell.mkdir('-p', path.dirname(filePath));
|
||||
if (size) {
|
||||
// Create a file of the specified size.
|
||||
cp.execSync(`fallocate -l ${size} ${filePath}`);
|
||||
} else {
|
||||
// Create a file with the specified content.
|
||||
fs.writeFileSync(filePath, content || '');
|
||||
}
|
||||
shell.exec(`chown ${AIO_WWW_USER} ${filePath}`);
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected createCleanUpFn(fn: () => void): CleanUpFn {
|
||||
const cleanUpFn = () => {
|
||||
const idx = this.cleanUpFns.indexOf(cleanUpFn);
|
||||
if (idx !== -1) {
|
||||
this.cleanUpFns.splice(idx, 1);
|
||||
fn();
|
||||
}
|
||||
};
|
||||
|
||||
this.cleanUpFns.push(cleanUpFn);
|
||||
|
||||
return cleanUpFn;
|
||||
}
|
||||
}
|
||||
|
||||
interface DefaultCurlOptions {
|
||||
defaultMethod?: CurlOptions['method'];
|
||||
defaultOptions?: CurlOptions['options'];
|
||||
defaultHeaders?: CurlOptions['headers'];
|
||||
defaultData?: CurlOptions['data'];
|
||||
defaultExtraPath?: CurlOptions['extraPath'];
|
||||
}
|
||||
|
||||
interface CurlOptions {
|
||||
method?: string;
|
||||
options?: string;
|
||||
headers?: string[];
|
||||
data?: any;
|
||||
url?: string;
|
||||
extraPath?: string;
|
||||
}
|
||||
|
||||
export function makeCurl(baseUrl: string, {
|
||||
defaultMethod = 'POST',
|
||||
defaultOptions = '',
|
||||
defaultHeaders = ['Content-Type: application/json'],
|
||||
defaultData = {},
|
||||
defaultExtraPath = '',
|
||||
}: DefaultCurlOptions = {}) {
|
||||
return function curl({
|
||||
method = defaultMethod,
|
||||
options = defaultOptions,
|
||||
headers = defaultHeaders,
|
||||
data = defaultData,
|
||||
url = baseUrl,
|
||||
extraPath = defaultExtraPath,
|
||||
}: CurlOptions) {
|
||||
const dataString = data ? JSON.stringify(data) : '';
|
||||
const cmd = `curl -iLX ${method} ` +
|
||||
`${options} ` +
|
||||
headers.map(header => `--header "${header}" `).join('') +
|
||||
`--data '${dataString}' ` +
|
||||
`${url}${extraPath}`;
|
||||
return helper.runCmd(cmd);
|
||||
};
|
||||
}
|
||||
|
||||
export interface PayloadData {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: number,
|
||||
build_parameters: {
|
||||
CIRCLE_JOB: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export function payload(buildNum: number): PayloadData {
|
||||
return {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: buildNum,
|
||||
build_parameters: { CIRCLE_JOB: 'aio_preview' },
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// Exports
|
||||
export const helper = new Helper();
|
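The `makeCurl()` factory above is how the verify-setup specs talk to the preview server over HTTP: it bakes in a base URL plus defaults (POST method, JSON content-type header) and returns a function that shells out via `helper.runCmd()`. A usage sketch follows; the host/port and build number are illustrative placeholders.

```ts
import {makeCurl, payload} from './helper';

// Sketch only: the URL and build number are placeholders.
async function triggerFakeCircleCiWebhook(): Promise<void> {
  const curlCircleBuild = makeCurl('http://localhost:80/circle-build');

  // Uses the defaults: POST and a 'Content-Type: application/json' header.
  const result = await curlCircleBuild(payload(40));
  console.log(result.success ? result.stdout : result.stderr);

  // Per-call overrides are also possible, e.g. a GET with an extra path segment.
  await curlCircleBuild({method: 'GET', extraPath: '/40'});
}
```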
@ -1,6 +0,0 @@
|
||||
// Imports
|
||||
import {runTests} from '../common/run-tests';
|
||||
|
||||
// Run
|
||||
const specFiles = [`${__dirname}/**/*.e2e.js`];
|
||||
runTests(specFiles);
|
@ -1,7 +0,0 @@
|
||||
declare module jasmine {
|
||||
interface Matchers {
|
||||
toExistAsAFile(remove?: boolean): boolean;
|
||||
toExistAsABuild(remove?: boolean): boolean;
|
||||
toExistAsAnArtifact(remove?: boolean): boolean;
|
||||
}
|
||||
}
|
@ -1,88 +0,0 @@
|
||||
import {sync as deleteEmpty} from 'delete-empty';
|
||||
import {existsSync, unlinkSync} from 'fs';
|
||||
import {join} from 'path';
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {SHA} from './constants';
|
||||
import {helper} from './helper';
|
||||
|
||||
function checkFile(filePath: string, remove: boolean): boolean {
|
||||
const exists = existsSync(filePath);
|
||||
if (exists && remove) {
|
||||
// if we expected the file to exist then we remove it to prevent leftover file errors
|
||||
unlinkSync(filePath);
|
||||
}
|
||||
return exists;
|
||||
}
|
||||
|
||||
function getArtifactPath(prNum: number, sha: string = SHA): string {
|
||||
return `${AIO_DOWNLOADS_DIR}/${prNum}-${computeShortSha(sha)}-aio-snapshot.tgz`;
|
||||
}
|
||||
|
||||
function checkFiles(prNum: number, isPublic: boolean, sha: string, isLegacy: boolean, remove: boolean) {
|
||||
const files = ['/index.html', '/foo/bar.js'];
|
||||
const prPath = helper.getPrDir(prNum, isPublic);
|
||||
const shaPath = helper.getShaDir(prPath, sha, isLegacy);
|
||||
|
||||
const existingFiles: string[] = [];
|
||||
const missingFiles: string[] = [];
|
||||
files
|
||||
.map(file => join(shaPath, file))
|
||||
.forEach(file => (checkFile(file, remove) ? existingFiles : missingFiles).push(file));
|
||||
|
||||
deleteEmpty(prPath);
|
||||
|
||||
return { existingFiles, missingFiles };
|
||||
}
|
||||
|
||||
class ToExistAsAFile implements jasmine.CustomMatcher {
|
||||
public compare(actual: string, remove = true): jasmine.CustomMatcherResult {
|
||||
const pass = checkFile(actual, remove);
|
||||
return {
|
||||
message: `Expected file at "${actual}" ${pass ? 'not' : ''} to exist`,
|
||||
pass,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ToExistAsAnArtifact implements jasmine.CustomMatcher {
|
||||
public compare(actual: {prNum: number, sha?: string}, remove = true): jasmine.CustomMatcherResult {
|
||||
const { prNum, sha = SHA } = actual;
|
||||
const filePath = getArtifactPath(prNum, sha);
|
||||
const pass = checkFile(filePath, remove);
|
||||
return {
|
||||
message: `Expected artifact "PR:${prNum}, SHA:${sha}, FILE:${filePath}" ${pass ? 'not' : '\b'} to exist`,
|
||||
pass,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ToExistAsABuild implements jasmine.CustomMatcher {
|
||||
public compare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}, remove = true):
|
||||
jasmine.CustomMatcherResult {
|
||||
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
|
||||
const {missingFiles} = checkFiles(prNum, isPublic, sha, isLegacy, remove);
|
||||
return {
|
||||
message: `Expected files for build "PR:${prNum}, SHA:${sha}" to exist:\n` +
|
||||
missingFiles.map(file => ` - ${file}`).join('\n'),
|
||||
pass: missingFiles.length === 0,
|
||||
};
|
||||
}
|
||||
public negativeCompare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}):
|
||||
jasmine.CustomMatcherResult {
|
||||
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
|
||||
const { existingFiles } = checkFiles(prNum, isPublic, sha, isLegacy, false);
|
||||
return {
|
||||
message: `Expected files for build "PR:${prNum}, SHA:${sha}" not to exist:\n` +
|
||||
existingFiles.map(file => ` - ${file}`).join('\n'),
|
||||
pass: existingFiles.length === 0,
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export const customMatchers = {
|
||||
toExistAsABuild: () => new ToExistAsABuild(),
|
||||
toExistAsAFile: () => new ToExistAsAFile(),
|
||||
toExistAsAnArtifact: () => new ToExistAsAnArtifact(),
|
||||
};
|
@ -1,171 +0,0 @@
|
||||
/* tslint:disable:max-line-length */
|
||||
import * as nock from 'nock';
|
||||
import * as tar from 'tar-stream';
|
||||
import {gzipSync} from 'zlib';
|
||||
import {getEnvVar, Logger} from '../common/utils';
|
||||
import {BuildNums, PrNums, SHA} from './constants';
|
||||
|
||||
// We are using the `nock` library to fake responses from REST requests, when testing.
|
||||
// This is necessary because the test preview-server runs as a separate node process from
|
||||
// the test harness, so we do not have direct access to the code (e.g. for mocking).
|
||||
// (See also 'lib/verify-setup/start-test-preview-server.ts'.)
|
||||
|
||||
// Each of the potential requests to an external API (e.g. Github or CircleCI) is mocked
|
||||
// below and returns a suitable response. This is quite complicated to set up, since the
|
||||
// response from, say, CircleCI will affect what request is made to, say, Github.
|
||||
|
||||
const logger = new Logger('mock-external-apis');
|
||||
|
||||
const log = (...args: any[]) => {
|
||||
// Filter out non-matching URL checks
|
||||
if (!/^matching.+: false$/.test(args[0])) {
|
||||
logger.log(...args);
|
||||
}
|
||||
};
|
||||
|
||||
const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
|
||||
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
|
||||
const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
|
||||
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
|
||||
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
|
||||
|
||||
const ACTIVE_TRUSTED_USER = 'active-trusted-user';
|
||||
const INACTIVE_TRUSTED_USER = 'inactive-trusted-user';
|
||||
const UNTRUSTED_USER = 'untrusted-user';
|
||||
|
||||
const BASIC_BUILD_INFO = {
|
||||
branch: `pull/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
|
||||
failed: false,
|
||||
reponame: AIO_GITHUB_REPO,
|
||||
username: AIO_GITHUB_ORGANIZATION,
|
||||
vcs_revision: SHA,
|
||||
};
|
||||
|
||||
const ISSUE_INFO_TRUSTED_LABEL = { labels: [{ name: AIO_TRUSTED_PR_LABEL }], user: { login: UNTRUSTED_USER } };
|
||||
const ISSUE_INFO_ACTIVE_TRUSTED_USER = { labels: [], user: { login: ACTIVE_TRUSTED_USER } };
|
||||
const ISSUE_INFO_INACTIVE_TRUSTED_USER = { labels: [], user: { login: INACTIVE_TRUSTED_USER } };
|
||||
const ISSUE_INFO_UNTRUSTED = { labels: [], user: { login: UNTRUSTED_USER } };
|
||||
const ACTIVE_STATE = { state: 'active' };
|
||||
const INACTIVE_STATE = { state: 'inactive' };
|
||||
|
||||
const TEST_TEAM_INFO = AIO_GITHUB_TEAM_SLUGS.map((slug, index) => ({ slug, id: index }));
|
||||
|
||||
const CIRCLE_CI_API_HOST = 'https://circleci.com';
|
||||
const CIRCLE_CI_TOKEN_PARAM = `circle-token=${AIO_CIRCLE_CI_TOKEN}`;
|
||||
const ARTIFACT_1 = { path: 'artifact-1', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-1`, _urlPath: '/artifacts/artifact-1' };
|
||||
const ARTIFACT_2 = { path: 'artifact-2', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-2`, _urlPath: '/artifacts/artifact-2' };
|
||||
const ARTIFACT_3 = { path: 'artifact-3', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-3`, _urlPath: '/artifacts/artifact-3' };
|
||||
const ARTIFACT_ERROR = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/error`, _urlPath: '/artifacts/error' };
|
||||
const ARTIFACT_404 = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/404`, _urlPath: '/artifacts/404' };
|
||||
const ARTIFACT_VALID_TRUSTED_USER = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/user`, _urlPath: '/artifacts/valid/user' };
|
||||
const ARTIFACT_VALID_TRUSTED_LABEL = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/label`, _urlPath: '/artifacts/valid/label' };
|
||||
const ARTIFACT_VALID_UNTRUSTED = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/untrusted`, _urlPath: '/artifacts/valid/untrusted' };
|
||||
|
||||
const CIRCLE_CI_BUILD_INFO_URL = `/api/v1.1/project/github/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}`;
|
||||
|
||||
const buildInfoUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}?${CIRCLE_CI_TOKEN_PARAM}`;
|
||||
const buildArtifactsUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}/artifacts?${CIRCLE_CI_TOKEN_PARAM}`;
|
||||
const buildInfo = (prNum: number) => ({ ...BASIC_BUILD_INFO, branch: `pull/${prNum}` });
|
||||
|
||||
const GITHUB_API_HOST = 'https://api.github.com';
|
||||
const GITHUB_ISSUES_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/issues`;
|
||||
const GITHUB_PULLS_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/pulls`;
|
||||
const GITHUB_TEAMS_URL = `/orgs/${AIO_GITHUB_ORGANIZATION}/teams`;
|
||||
|
||||
const getIssueUrl = (prNum: number) => `${GITHUB_ISSUES_URL}/${prNum}`;
|
||||
const getFilesUrl = (prNum: number, pageNum = 1) => `${GITHUB_PULLS_URL}/${prNum}/files?page=${pageNum}&per_page=100`;
|
||||
const getCommentUrl = (prNum: number) => `${getIssueUrl(prNum)}/comments`;
|
||||
const getTeamMembershipUrl = (teamId: number, username: string) => `/teams/${teamId}/memberships/${username}`;
|
||||
|
||||
const createArchive = (buildNum: number, prNum: number, sha: string) => {
|
||||
logger.log('createArchive', buildNum, prNum, sha);
|
||||
const pack = tar.pack();
|
||||
pack.entry({name: 'index.html'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /index.html`);
|
||||
pack.entry({name: 'foo/bar.js'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /foo/bar.js`);
|
||||
pack.finalize();
|
||||
const zip = gzipSync(pack.read());
|
||||
return zip;
|
||||
};
|
||||
|
||||
// Create request scopes
|
||||
const circleCiApi = nock(CIRCLE_CI_API_HOST).log(log).persist();
|
||||
const githubApi = nock(GITHUB_API_HOST).log(log).persist().matchHeader('Authorization', `token ${AIO_GITHUB_TOKEN}`);
|
||||
|
||||
//////////////////////////////
|
||||
|
||||
// GENERAL responses
|
||||
githubApi.get(GITHUB_TEAMS_URL + '?page=1&per_page=100').reply(200, TEST_TEAM_INFO);
|
||||
githubApi.post(getCommentUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200);
|
||||
|
||||
// BUILD_INFO errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_ERROR)).replyWithError('BUILD_INFO_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_404)).reply(404, 'BUILD_INFO_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_BUILD_FAILED)).reply(200, { ...BASIC_BUILD_INFO, failed: true });
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_ORG)).reply(200, { ...BASIC_BUILD_INFO, username: 'bad' });
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_REPO)).reply(200, { ...BASIC_BUILD_INFO, reponame: 'bad' });
|
||||
|
||||
// CHANGED FILE errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_ERROR)).reply(200, buildInfo(PrNums.CHANGED_FILES_ERROR));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_ERROR)).replyWithError('CHANGED_FILES_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_404)).reply(200, buildInfo(PrNums.CHANGED_FILES_404));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_404)).reply(404, 'CHANGED_FILES_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_NONE)).reply(200, buildInfo(PrNums.CHANGED_FILES_NONE));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_NONE)).reply(200, []);
|
||||
|
||||
// ARTIFACT URL errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).replyWithError('BUILD_ARTIFACTS_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(404, 'BUILD_ARTIFACTS_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, []);
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, [ARTIFACT_1, ARTIFACT_2, ARTIFACT_3]);
|
||||
|
||||
// ARTIFACT DOWNLOAD errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, [ARTIFACT_ERROR]);
|
||||
circleCiApi.get(ARTIFACT_ERROR._urlPath).replyWithError(ARTIFACT_ERROR._urlPath);
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, [ARTIFACT_404]);
|
||||
circleCiApi.get(ARTIFACT_404._urlPath).reply(404, ARTIFACT_404._urlPath);
|
||||
|
||||
// TRUST CHECK errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ERROR));
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ERROR)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ERROR)).replyWithError('TRUST_CHECK_ERROR');
|
||||
|
||||
// ACTIVE TRUSTED USER response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
circleCiApi.get(ARTIFACT_VALID_TRUSTED_USER._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_ACTIVE_TRUSTED_USER);
|
||||
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
|
||||
|
||||
// TRUSTED LABEL response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [ARTIFACT_VALID_TRUSTED_LABEL]);
|
||||
circleCiApi.get(ARTIFACT_VALID_TRUSTED_LABEL._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_TRUSTED_LABEL, PrNums.TRUST_CHECK_TRUSTED_LABEL, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, ISSUE_INFO_TRUSTED_LABEL);
|
||||
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
|
||||
|
||||
// INACTIVE TRUSTED USER response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_INACTIVE_TRUSTED_USER);
|
||||
githubApi.get(getTeamMembershipUrl(0, INACTIVE_TRUSTED_USER)).reply(200, INACTIVE_STATE);
|
||||
|
||||
// UNTRUSTED response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, buildInfo(PrNums.TRUST_CHECK_UNTRUSTED));
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, [ARTIFACT_VALID_UNTRUSTED]);
|
||||
circleCiApi.get(ARTIFACT_VALID_UNTRUSTED._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_UNTRUSTED, PrNums.TRUST_CHECK_UNTRUSTED, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, ISSUE_INFO_UNTRUSTED);
|
||||
githubApi.get(getTeamMembershipUrl(0, UNTRUSTED_USER)).reply(404);
|
@ -1,395 +0,0 @@
|
||||
// Imports
|
||||
import * as path from 'path';
|
||||
import {rm} from 'shelljs';
|
||||
import {AIO_BUILDS_DIR, AIO_NGINX_HOSTNAME, AIO_NGINX_PORT_HTTP, AIO_NGINX_PORT_HTTPS} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {PrNums} from './constants';
|
||||
import {helper as h} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe(`nginx`, () => {
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
it('should redirect HTTP to HTTPS', async () => {
|
||||
const httpHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTP}`;
|
||||
const httpsHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTPS}`;
|
||||
const urlMap = {
|
||||
[`http://${httpHost}/`]: `https://${httpsHost}/`,
|
||||
[`http://${httpHost}/foo`]: `https://${httpsHost}/foo`,
|
||||
[`http://foo.${httpHost}/`]: `https://foo.${httpsHost}/`,
|
||||
};
|
||||
|
||||
const verifyRedirection = async (fromUrl: string, toUrl: string) => {
|
||||
const result = await h.runCmd(`curl -i ${fromUrl}`);
|
||||
h.verifyResponse(307)(result);
|
||||
|
||||
const headers = result.stdout.split(/(?:\r?\n){2,}/)[0];
|
||||
expect(headers).toContain(`Location: ${toUrl}`);
|
||||
};
|
||||
|
||||
await Promise.all(Object.entries(urlMap).map(urls => verifyRedirection(...urls)));
|
||||
});
|
||||
|
||||
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`(on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = AIO_NGINX_HOSTNAME;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = 9;
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
const shortSha9 = computeShortSha(sha9);
|
||||
const shortSha0 = computeShortSha(sha0);
|
||||
|
||||
|
||||
describe(`pr<pr>-<sha>.${host}/*`, () => {
|
||||
|
||||
describe('(for public builds)', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyBuild(pr, sha9);
|
||||
h.createDummyBuild(pr, sha0);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
expect({ prNum: pr, sha: sha9 }).toExistAsABuild();
|
||||
expect({ prNum: pr, sha: sha0 }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js', async () => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex));
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(h.verifyResponse(200, bodyRegex));
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 403 for directories', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/`).then(h.verifyResponse(403)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo`).then(h.verifyResponse(403)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths to files', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404));
|
||||
});
|
||||
|
||||
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown PRs/SHAs', async () => {
|
||||
const otherPr = 54321;
|
||||
const otherShortSha = computeShortSha('8'.repeat(40));
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}9.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherShortSha}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the subdomain format is wrong', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://prx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://xx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://p${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://r${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}_${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404));
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', async () => {
|
||||
const bodyRegex9 = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
const bodyRegex0 = new RegExp(`^PR: ${pr} | SHA: ${sha0} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('(for hidden builds)', () => {
|
||||
|
||||
it('should respond with 404 for any file or directory', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for any file or directory (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false, false, true);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const baseUrl = `${scheme}://${host}/can-have-public-preview`;
|
||||
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX POST ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX GET ${baseUrl}/${PrNums.CHANGED_FILES_ERROR}`).
|
||||
then(h.verifyResponse(500, /CHANGED_FILES_ERROR/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const cmdPrefix = `curl -iLX GET ${baseUrl}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}-foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}nfoo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/42/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/f00`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const url = `${scheme}://${host}/circle-build`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
|
||||
then(h.verifyResponse(400, /Incorrect body content. Expected JSON/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncircle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build-foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-buildnfoo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/pr`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/42`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const url = `${scheme}://${host}/pr-updated`;
|
||||
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST --header "Content-Type: application/json" ${url}`).
|
||||
then(h.verifyResponse(400, /Missing or empty 'number' field/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
beforeEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond with 404 for unknown URLs (even if the resource exists)', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html', 'foo'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
rm('-r', absFilePath);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
});
|
@ -1,568 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import {join} from 'path';
|
||||
import {AIO_PREVIEW_SERVER_HOSTNAME, AIO_PREVIEW_SERVER_PORT, AIO_WWW_USER} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {ALT_SHA, BuildNums, PrNums, SHA, SIMILAR_SHA} from './constants';
|
||||
import {helper as h, makeCurl, payload} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe('preview-server', () => {
|
||||
const hostname = AIO_PREVIEW_SERVER_HOSTNAME;
|
||||
const port = AIO_PREVIEW_SERVER_PORT;
|
||||
const host = `http://${hostname}:${port}`;
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const curl = makeCurl(`${host}/can-have-public-preview`, {
|
||||
defaultData: null,
|
||||
defaultExtraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
|
||||
defaultHeaders: [],
|
||||
defaultMethod: 'GET',
|
||||
});
|
||||
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'POST'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `-foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `nfoo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}/foo`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/f00'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking for significant file changes fails', async () => {
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_404}`}).then(h.verifyResponse(500, /CHANGED_FILES_404/)),
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_ERROR}`}).then(h.verifyResponse(500, /CHANGED_FILES_ERROR/)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if no significant files were touched', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'No significant files touched.',
|
||||
});
|
||||
|
||||
await curl({extraPath: `/${PrNums.CHANGED_FILES_NONE}`}).then(h.verifyResponse(200, expectedResponse));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking "trusted" status fails', async () => {
|
||||
await curl({extraPath: `/${PrNums.TRUST_CHECK_ERROR}`}).then(h.verifyResponse(500, 'TRUST_CHECK_ERROR'));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if the PR is not automatically verifiable as "trusted"', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'Not automatically verifiable as \\"trusted\\".',
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_UNTRUSTED}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (true) if the PR can have a public preview', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: true,
|
||||
reason: null,
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_TRUSTED_LABEL}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
const curl = makeCurl(`${host}/circle-build`);
|
||||
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
await Promise.all([
|
||||
curl({url: `${host}/foo/circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/foo-circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/fooncircle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build-foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-buildnfoo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/pr`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build42`}).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 400 if the body is not valid', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: {} } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1 } } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1, build_parameters: {} } } }).then(h.verifyResponse(400)),
|
||||
curl(payload(0)).then(h.verifyResponse(400)),
|
||||
curl(payload(-1)).then(h.verifyResponse(400)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI API request errors', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_INFO_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if the build on CircleCI failed', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_BUILD_FAILED)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github org from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_ORG)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github repo from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_REPO)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github files API errors', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.CHANGED_FILES_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if no significant files are changed by the PR', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_NONE)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI artifact API fails', async () => {
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_404)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_EMPTY)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_MISSING)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if fetching the artifact errors', async () => {
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the GH trusted API fails', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it('should respond with 201 if a new public build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).then(h.verifyResponse(201));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should respond with 202 if a new private build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_UNTRUSTED)).then(h.verifyResponse(202));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_UNTRUSTED, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
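// Repeat the following specs for both public (expecting 201) and non-public (expecting 202) builds.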
[true, false].forEach(isPublic => {
|
||||
const build = isPublic ? BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const prNum = isPublic ? PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
const label = isPublic ? 'public' : 'non-public';
|
||||
const overwriteRe = RegExp(`^Request to overwrite existing ${label} directory`);
|
||||
const statusCode = isPublic ? 201 : 202;
|
||||
|
||||
describe(`for ${label} builds`, () => {
|
||||
|
||||
it('should extract the contents of the build artifact', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /index.html`);
|
||||
expect(h.readBuildFile(prNum, SHA, 'foo/bar.js', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /foo/bar.js`);
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should create files/directories owned by '${AIO_WWW_USER}'`, async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
const { stdout: allFiles } = await h.runCmd(`find ${shaDir}`);
|
||||
const { stdout: userFiles } = await h.runCmd(`find ${shaDir} -user ${AIO_WWW_USER}`);
|
||||
|
||||
expect(userFiles).toBe(allFiles);
|
||||
expect(userFiles).toContain(shaDir);
|
||||
expect(userFiles).toContain(join(shaDir, 'index.html'));
|
||||
expect(userFiles).toContain(join(shaDir, 'foo', 'bar.js'));
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should delete the build artifact file', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, SHA }).not.toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should make the build directory non-writable', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||
const isNotWritable = (fileOrDir: string) => {
|
||||
const mode = fs.statSync(fileOrDir).mode;
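// The 0o222 mask covers the write permission bits for owner, group and other, so a zero result means read-only.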
|
||||
// tslint:disable-next-line: no-bitwise
|
||||
return !(mode & parseInt('222', 8));
|
||||
};
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
expect(isNotWritable(shaDir)).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'index.html'))).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'foo', 'bar.js'))).toBe(true);
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)',
|
||||
async () => {
|
||||
// It is possible that 40-chars long build directories exist, if they had been deployed
|
||||
// before implementing the shorter build directory names. In that case, we don't want the
|
||||
// second (shorter) name to be considered the same as the old one (even if they originate
|
||||
// from the same SHA).
|
||||
|
||||
h.createDummyBuild(prNum, SHA, isPublic, false, true);
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic, true);
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, false)).toContain('index.html');
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: false }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds`, async () => {
|
||||
// set up a build already in place
|
||||
h.createDummyBuild(prNum, SHA, isPublic);
|
||||
// distinguish this build from the downloaded one
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds (even if the SHA is different)`, async () => {
|
||||
// Since only the first few characters of the SHA are used, it is possible for two different
|
||||
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
|
||||
// overwrite the first.
|
||||
expect(SIMILAR_SHA).not.toEqual(SHA);
|
||||
expect(computeShortSha(SIMILAR_SHA)).toEqual(computeShortSha(SHA));
|
||||
h.createDummyBuild(prNum, SIMILAR_SHA, isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toContain('index.html');
|
||||
h.writeBuildFile(prNum, SIMILAR_SHA, 'index.html', 'My content', isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic, sha: SIMILAR_SHA }).toExistAsABuild();
|
||||
expect({ prNum, sha: SIMILAR_SHA }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it('should only delete the SHA directory on error (for existing PR)', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic, sha: SHA }).not.toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
describe('when the PR\'s visibility has changed', () => {
|
||||
|
||||
it('should update the PR\'s visibility', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds (but keep the updated visibility)', async () => {
|
||||
h.createDummyBuild(prNum, SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic: !isPublic }).not.toExistAsABuild();
|
||||
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
|
||||
it('should reject the request if it fails to update the PR\'s visibility', async () => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
|
||||
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(prNum, !isPublic)}' ` +
|
||||
`to existing directory '${h.getPrDir(prNum, isPublic)}'.`);
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, errorRegex));
|
||||
|
||||
expect({ prNum, isPublic }).not.toExistAsABuild();
|
||||
|
||||
// The bad folders should have been deleted
|
||||
expect({ prNum, sha: ALT_SHA, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, sha: ALT_SHA, isPublic: !isPublic }).toExistAsABuild();
|
||||
|
||||
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const curl = makeCurl(`${host}/pr-updated`);
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a payload', async () => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400, bodyRegex)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a \'number\' field', async () => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
curl({ data: { number: null} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests for which checking the PR visibility fails', async () => {
|
||||
await curl({ data: { number: PrNums.TRUST_CHECK_ERROR } }).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const mockPayload = JSON.stringify({number: 1}); // MockExternalApiFlags.TRUST_CHECK_ACTIVE_TRUSTED_USER });
|
||||
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" ${host}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if PR\'s visibility is already up-to-date', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
const checkVisibilities = (remove: boolean) => {
|
||||
// Public build is already public.
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild(remove);
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild(remove);
|
||||
// Hidden build is already hidden.
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild(remove);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild(remove);
|
||||
};
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, true);
|
||||
h.createDummyBuild(hiddenPr, SHA, false);
|
||||
checkVisibilities(false);
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||
checkVisibilities(true);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if \'action\' implies no visibility change', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
const checkVisibilities = (remove: boolean) => {
|
||||
// Public build is hidden atm.
|
||||
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(remove);
|
||||
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(remove);
|
||||
// Hidden build is public atm.
|
||||
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(remove);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(remove);
|
||||
};
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
checkVisibilities(false);
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||
checkVisibilities(true);
|
||||
});
|
||||
|
||||
|
||||
describe('when the visibility has changed', () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create initial PR builds with the opposite visibilities from the ones that will be reported:
|
||||
// - The now public PR was previously hidden.
|
||||
// - The now hidden PR was previously public.
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
|
||||
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(false);
|
||||
});
|
||||
afterEach(() => {
|
||||
// Expect PRs' visibility to have been updated:
|
||||
// - The public PR should be actually public (previously it was hidden).
|
||||
// - The hidden PR should be actually hidden (previously it was public).
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: undefined)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: labeled)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'labeled' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'labeled' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: unlabeled)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
it('should respond with 404 for requests to unknown URLs', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PUT ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX POST ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
});
|
@ -1,269 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_NGINX_HOSTNAME} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {ALT_SHA, BuildNums, PrNums, SHA} from './constants';
|
||||
import {helper as h, makeCurl, payload} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
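// The whole suite below is repeated once per URL scheme supported by the test helper.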
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = AIO_NGINX_HOSTNAME;
|
||||
const host = `${hostname}:${port}`;
|
||||
const curlPrUpdated = makeCurl(`${scheme}://${host}/pr-updated`);
|
||||
|
||||
const getFile = (pr: number, sha: string, file: string) =>
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${computeShortSha(sha)}.${host}/${file}`);
|
||||
const prUpdated = (prNum: number, action?: string) => curlPrUpdated({ data: { number: prNum, action } });
|
||||
const circleBuild = makeCurl(`${scheme}://${host}/circle-build`);
|
||||
|
||||
beforeEach(() => {
|
||||
jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;
|
||||
jasmine.addMatchers(customMatchers);
|
||||
});
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe('for a new/non-existing PR', () => {
|
||||
|
||||
it('should be able to create and serve a public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
|
||||
const regexPrefix = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
|
||||
const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
|
||||
await Promise.all([
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR }).toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to create but not serve a hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
|
||||
await Promise.all([
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should reject if verification fails', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ERROR;
|
||||
const PR = PrNums.TRUST_CHECK_ERROR;
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to notify that a PR has been updated (and do nothing)', async () => {
|
||||
await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(200));
|
||||
// The PR should still not exist.
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('for an existing PR', () => {
|
||||
|
||||
it('should be able to create and serve a public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
|
||||
const regexPrefix1 = `^PR: ${PR} \\| SHA: ${ALT_SHA} \\| File:`;
|
||||
const idxContentRegex1 = new RegExp(`${regexPrefix1} \\/index\\.html$`);
|
||||
const barContentRegex1 = new RegExp(`${regexPrefix1} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix2 = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex2 = new RegExp(`${regexPrefix2} \\/index\\.html$`);
|
||||
const barContentRegex2 = new RegExp(`${regexPrefix2} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA);
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
|
||||
await Promise.all([
|
||||
getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex1)),
|
||||
getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex1)),
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex2)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex2)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR, sha: SHA }).toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to create but not serve a hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA, false);
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
|
||||
|
||||
await Promise.all([
|
||||
getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR, sha: SHA }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: SHA, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should reject if verification fails', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ERROR;
|
||||
const PR = PrNums.TRUST_CHECK_ERROR;
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA, false);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not be able to overwrite an existing public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
|
||||
const regexPrefix = `^PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
|
||||
const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(PR, SHA);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
|
||||
await Promise.all([
|
||||
getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
|
||||
getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not be able to overwrite an existing hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
h.createDummyBuild(PR, SHA, false);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to request re-checking visibility (if outdated)', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
|
||||
// PR visibilities are outdated (i.e. the opposite of what they should be).
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
|
||||
|
||||
await Promise.all([
|
||||
prUpdated(publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(hiddenPr).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// PR visibilities should have been updated.
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should be able to request re-checking visibility (if up-to-date)', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, true);
|
||||
h.createDummyBuild(hiddenPr, SHA, false);
|
||||
|
||||
// PR visibilities are already up-to-date.
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
|
||||
|
||||
await Promise.all([
|
||||
prUpdated(publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(hiddenPr).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// PR visibilities are still up-to-date.
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should reject a request if re-checking visibility fails', async () => {
|
||||
const errorPr = PrNums.TRUST_CHECK_ERROR;
|
||||
|
||||
h.createDummyBuild(errorPr, SHA, true);
|
||||
|
||||
expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: errorPr, isPublic: true }).toExistAsABuild(false);
|
||||
|
||||
await prUpdated(errorPr).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
|
||||
|
||||
// PR visibility should not have been updated.
|
||||
expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: errorPr, isPublic: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should reject a request if updating visibility fails', async () => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, false);
|
||||
h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, true);
|
||||
|
||||
const hiddenPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, false);
|
||||
const publicPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, true);
|
||||
const bodyRegex = new RegExp(`Request to move '${hiddenPrDir}' to existing directory '${publicPrDir}'`);
|
||||
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild(false);
|
||||
|
||||
await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(409, bodyRegex));
|
||||
|
||||
// PR visibility should not have been updated.
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}));
|
@ -1,2 +0,0 @@
import '../preview-server';
import './mock-external-apis';
@ -1,30 +0,0 @@
declare module 'tar-stream' {

  import {Readable, Writable} from 'stream';

  export interface Pack extends Readable {
    entry(header: Header, callback?: (err?: any) => {}): Writable;
    entry(header: Header, contents: string, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: Buffer, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: string|Buffer, callback?: (err?: any) => {}): Writable;
    finalize();
    destroy(err: any);
  }

  export interface Header {
    name: string;
    mode?: number;
    uid?: number;
    gid?: number;
    size?: number;
    mtime?: Date;
    type?: type;
    linkname?: string;
    uname?: string;
    gname?: string;
    devmajor?: number;
    devminor?: number;
  }

  export function pack(): Pack;
}
@ -1,54 +0,0 @@
|
||||
{
|
||||
"name": "aio-scripts-js",
|
||||
"version": "1.0.0",
|
||||
"description": "Performing various tasks on PR build artifacts for angular.io.",
|
||||
"repository": "https://github.com/angular/angular.git",
|
||||
"author": "Angular",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"prebuild": "yarn clean-dist",
|
||||
"build": "yarn ~~build",
|
||||
"prebuild-watch": "yarn prebuild",
|
||||
"build-watch": "yarn ~~build-watch",
|
||||
"clean-dist": "node --eval \"require('shelljs').rm('-rf', 'dist')\"",
|
||||
"predev": "yarn build || true",
|
||||
"dev": "run-p ~~build-watch ~~test-watch",
|
||||
"lint": "tslint --project tsconfig.json",
|
||||
"pretest": "run-s build lint",
|
||||
"test": "yarn ~~test-only",
|
||||
"pretest-watch": "yarn pretest",
|
||||
"test-watch": "yarn ~~test-watch",
|
||||
"~~build": "tsc",
|
||||
"~~build-watch": "yarn ~~build --watch",
|
||||
"~~test-only": "node dist/test",
|
||||
"~~test-watch": "nodemon --delay 1 --exec \"yarn ~~test-only\" --watch dist"
|
||||
},
|
||||
"dependencies": {
|
||||
"body-parser": "^1.19.0",
|
||||
"delete-empty": "^3.0.0",
|
||||
"express": "^4.17.1",
|
||||
"jasmine": "^3.5.0",
|
||||
"nock": "^12.0.3",
|
||||
"node-fetch": "^2.6.0",
|
||||
"shelljs": "^0.8.4",
|
||||
"source-map-support": "^0.5.19",
|
||||
"tar-stream": "^2.1.2",
|
||||
"tslib": "^1.11.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/body-parser": "^1.19.0",
|
||||
"@types/express": "^4.17.6",
|
||||
"@types/jasmine": "^3.5.10",
|
||||
"@types/nock": "^11.1.0",
|
||||
"@types/node": "^13.13.2",
|
||||
"@types/node-fetch": "^2.5.7",
|
||||
"@types/shelljs": "^0.8.7",
|
||||
"@types/supertest": "^2.0.8",
|
||||
"nodemon": "^2.0.3",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"supertest": "^4.0.2",
|
||||
"tslint": "^6.1.1",
|
||||
"tslint-jasmine-noSkipOrFocus": "^1.0.9",
|
||||
"typescript": "^3.8.3"
|
||||
}
|
||||
}
|
@ -1,433 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import {normalize} from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
|
||||
import {HIDDEN_DIR_PREFIX} from '../../lib/common/constants';
|
||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||
import {Logger} from '../../lib/common/utils';
|
||||
|
||||
const EXISTING_BUILDS = [10, 20, 30, 40];
|
||||
const EXISTING_DOWNLOADS = [
|
||||
'10-ABCDEF0-build.zip',
|
||||
'10-1234567-build.zip',
|
||||
'20-ABCDEF0-build.zip',
|
||||
'20-1234567-build.zip',
|
||||
];
|
||||
const OPEN_PRS = [10, 40];
|
||||
|
||||
// Tests
|
||||
describe('BuildCleaner', () => {
|
||||
let loggerErrorSpy: jasmine.Spy;
|
||||
let loggerLogSpy: jasmine.Spy;
|
||||
let cleaner: BuildCleaner;
|
||||
|
||||
beforeEach(() => {
|
||||
loggerErrorSpy = spyOn(Logger.prototype, 'error');
|
||||
loggerLogSpy = spyOn(Logger.prototype, 'log');
|
||||
cleaner = new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', '/downloads', 'build.zip');
|
||||
});
|
||||
|
||||
describe('constructor()', () => {
|
||||
|
||||
it('should throw if \'buildsDir\' is empty', () => {
|
||||
expect(() => new BuildCleaner('', 'baz', 'qux', '12345', 'downloads', 'build.zip')).
|
||||
toThrowError('Missing or empty required parameter \'buildsDir\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubOrg\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', '', 'qux', '12345', 'downloads', 'build.zip')).
|
||||
toThrowError('Missing or empty required parameter \'githubOrg\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubRepo\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', 'baz', '', '12345', 'downloads', 'build.zip')).
|
||||
toThrowError('Missing or empty required parameter \'githubRepo\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'githubToken\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '', 'downloads', 'build.zip')).
|
||||
toThrowError('Missing or empty required parameter \'githubToken\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'downloadsDir\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', '', 'build.zip')).
|
||||
toThrowError('Missing or empty required parameter \'downloadsDir\'!');
|
||||
});
|
||||
|
||||
|
||||
it('should throw if \'artifactPath\' is empty', () => {
|
||||
expect(() => new BuildCleaner('/foo/bar', 'baz', 'qux', '12345', 'downloads', '')).
|
||||
toThrowError('Missing or empty required parameter \'artifactPath\'!');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('cleanUp()', () => {
|
||||
let cleanerGetExistingBuildNumbersSpy: jasmine.Spy;
|
||||
let cleanerGetOpenPrNumbersSpy: jasmine.Spy;
|
||||
let cleanerGetExistingDownloadsSpy: jasmine.Spy;
|
||||
let cleanerRemoveUnnecessaryBuildsSpy: jasmine.Spy;
|
||||
let cleanerRemoveUnnecessaryDownloadsSpy: jasmine.Spy;
|
||||
|
||||
beforeEach(() => {
|
||||
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner, 'getExistingBuildNumbers').and.resolveTo(EXISTING_BUILDS);
|
||||
cleanerGetOpenPrNumbersSpy = spyOn(cleaner, 'getOpenPrNumbers').and.resolveTo(OPEN_PRS);
|
||||
cleanerGetExistingDownloadsSpy = spyOn(cleaner, 'getExistingDownloads').and.resolveTo(EXISTING_DOWNLOADS);
|
||||
|
||||
cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner, 'removeUnnecessaryBuilds');
|
||||
cleanerRemoveUnnecessaryDownloadsSpy = spyOn(cleaner, 'removeUnnecessaryDownloads');
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', async () => {
|
||||
const promise = cleaner.cleanUp();
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
|
||||
// Do not complete the test (and release the spies) synchronously, to avoid running the actual implementations.
|
||||
await promise;
|
||||
});
|
||||
|
||||
|
||||
it('should get the open PRs', async () => {
|
||||
await cleaner.cleanUp();
|
||||
expect(cleanerGetOpenPrNumbersSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should get the existing builds', async () => {
|
||||
await cleaner.cleanUp();
|
||||
expect(cleanerGetExistingBuildNumbersSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should get the existing downloads', async () => {
|
||||
await cleaner.cleanUp();
|
||||
expect(cleanerGetExistingDownloadsSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
||||
it('should pass existing builds and open PRs to \'removeUnnecessaryBuilds()\'', async () => {
|
||||
await cleaner.cleanUp();
|
||||
expect(cleanerRemoveUnnecessaryBuildsSpy).toHaveBeenCalledWith(EXISTING_BUILDS, OPEN_PRS);
|
||||
});
|
||||
|
||||
|
||||
it('should pass existing downloads and open PRs to \'removeUnnecessaryDownloads()\'', async () => {
|
||||
await cleaner.cleanUp();
|
||||
expect(cleanerRemoveUnnecessaryDownloadsSpy).toHaveBeenCalledWith(EXISTING_DOWNLOADS, OPEN_PRS);
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getOpenPrNumbers()\' rejects', async () => {
|
||||
cleanerGetOpenPrNumbersSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getExistingBuildNumbers()\' rejects', async () => {
|
||||
cleanerGetExistingBuildNumbersSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'getExistingDownloads()\' rejects', async () => {
|
||||
cleanerGetExistingDownloadsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'removeUnnecessaryBuilds()\' rejects', async () => {
|
||||
cleanerRemoveUnnecessaryBuildsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if \'removeUnnecessaryDownloads()\' rejects', async () => {
|
||||
cleanerRemoveUnnecessaryDownloadsSpy.and.rejectWith('Test');
|
||||
await expectAsync(cleaner.cleanUp()).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getExistingBuildNumbers()', () => {
|
||||
let fsReaddirSpy: jasmine.Spy;
|
||||
let readdirCb: (err: any, files?: string[]) => void;
|
||||
let promise: Promise<number[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
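// Fake `fs.readdir()` by capturing its callback, so each spec can invoke it manually with stubbed results.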
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake(
|
||||
((_: string, cb: typeof readdirCb) => readdirCb = cb) as unknown as typeof fs.readdir,
|
||||
);
|
||||
promise = cleaner.getExistingBuildNumbers();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
});
|
||||
|
||||
|
||||
it('should get the contents of the builds directory', () => {
|
||||
expect(fsReaddirSpy).toHaveBeenCalled();
|
||||
expect(fsReaddirSpy.calls.argsFor(0)[0]).toBe('/foo/bar');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while getting the files', async () => {
|
||||
readdirCb('Test');
|
||||
await expectAsync(promise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the returned files (as numbers)', async () => {
|
||||
readdirCb(null, ['12', '34', '56']);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
|
||||
it('should remove `HIDDEN_DIR_PREFIX` from the filenames', async () => {
|
||||
readdirCb(null, [`${HIDDEN_DIR_PREFIX}12`, '34', `${HIDDEN_DIR_PREFIX}56`]);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
|
||||
it('should ignore files with non-numeric (or zero) names', async () => {
|
||||
readdirCb(null, ['12', 'foo', '34', 'bar', '56', '000']);
|
||||
await expectAsync(promise).toBeResolvedTo([12, 34, 56]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('getOpenPrNumbers()', () => {
|
||||
let prDeferred: {resolve: (v: any) => void, reject: (v: any) => void};
|
||||
let promise: Promise<number[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
spyOn(GithubPullRequests.prototype, 'fetchAll').and.callFake(() => {
|
||||
return new Promise((resolve, reject) => prDeferred = {resolve, reject});
|
||||
});
|
||||
|
||||
promise = cleaner.getOpenPrNumbers();
|
||||
});
|
||||
|
||||
|
||||
it('should return a promise', () => {
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
});
|
||||
|
||||
|
||||
it('should fetch open PRs via \'GithubPullRequests\'', () => {
|
||||
expect(GithubPullRequests.prototype.fetchAll).toHaveBeenCalledWith('open');
|
||||
});
|
||||
|
||||
|
||||
it('should reject if an error occurs while fetching PRs', async () => {
|
||||
prDeferred.reject('Test');
|
||||
await expectAsync(promise).toBeRejectedWith('Test');
|
||||
});
|
||||
|
||||
|
||||
it('should resolve with the numbers of the fetched PRs', async () => {
|
||||
prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]);
|
||||
await expectAsync(promise).toBeResolvedTo([1, 2, 3]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
  describe('getExistingDownloads()', () => {
    let fsReaddirSpy: jasmine.Spy;
    let readdirCb: (err: any, files?: string[]) => void;
    let promise: Promise<string[]>;

    beforeEach(() => {
      fsReaddirSpy = spyOn(fs, 'readdir').and.callFake(
        ((_: string, cb: typeof readdirCb) => readdirCb = cb) as unknown as typeof fs.readdir,
      );
      promise = cleaner.getExistingDownloads();
    });


    it('should return a promise', () => {
      expect(promise).toBeInstanceOf(Promise);
    });


    it('should get the contents of the downloads directory', () => {
      expect(fsReaddirSpy).toHaveBeenCalled();
      expect(fsReaddirSpy.calls.argsFor(0)[0]).toBe('/downloads');
    });


    it('should reject if an error occurs while getting the files', async () => {
      readdirCb('Test');
      await expectAsync(promise).toBeRejectedWith('Test');
    });


    it('should resolve with the returned file names', async () => {
      readdirCb(null, EXISTING_DOWNLOADS);
      await expectAsync(promise).toBeResolvedTo(EXISTING_DOWNLOADS);
    });


    it('should ignore files that do not match the artifactPath', async () => {
      readdirCb(null, ['10-ABCDEF-build.zip', '20-AAAAAAA-otherfile.zip', '30-FFFFFFF-build.zip']);
      await expectAsync(promise).toBeResolvedTo(['10-ABCDEF-build.zip', '30-FFFFFFF-build.zip']);
    });

  });

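The downloads specs mirror the builds ones: list the downloads directory and keep only entries that look like build artifacts. A minimal sketch under that assumption; the exact artifact-name check is not visible in this diff, the tests only show that '...-build.zip' entries are kept and '...-otherfile.zip' is dropped.

// Hypothetical sketch: filter directory entries by an assumed artifact file name suffix.
import * as fs from 'fs';

function getExistingDownloadsSketch(downloadsDir: string, artifactFileName = 'build.zip'): Promise<string[]> {
  return new Promise((resolve, reject) => {
    fs.readdir(downloadsDir, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files.filter(file => file.endsWith(artifactFileName)));
    });
  });
}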
  describe('removeDir()', () => {
    let shellChmodSpy: jasmine.Spy;
    let shellRmSpy: jasmine.Spy;
    let shellTestSpy: jasmine.Spy;

    beforeEach(() => {
      shellChmodSpy = spyOn(shell, 'chmod');
      shellRmSpy = spyOn(shell, 'rm');
      shellTestSpy = spyOn(shell, 'test').and.returnValue(true);
    });


    it('should test if the directory exists (and return if it does not)', () => {
      shellTestSpy.and.returnValue(false);
      cleaner.removeDir('/foo/bar');

      expect(shellTestSpy).toHaveBeenCalledWith('-d', '/foo/bar');
      expect(shellChmodSpy).not.toHaveBeenCalled();
      expect(shellRmSpy).not.toHaveBeenCalled();
    });


    it('should remove the specified directory and its content', () => {
      cleaner.removeDir('/foo/bar');
      expect(shellRmSpy).toHaveBeenCalledWith('-rf', '/foo/bar');
    });


    it('should make the directory and its content writable before removing', () => {
      cleaner.removeDir('/foo/bar');

      expect(shellChmodSpy).toHaveBeenCalledBefore(shellRmSpy);
      expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar');
      expect(shellRmSpy).toHaveBeenCalled();
    });


    it('should catch errors and log them', () => {
      shellRmSpy.and.throwError('Test');
      cleaner.removeDir('/foo/bar');

      expect(loggerErrorSpy).toHaveBeenCalledWith('ERROR: Unable to remove \'/foo/bar\' due to:', new Error('Test'));
    });

  });

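removeDir() is specified exhaustively by the four cases above: check that the directory exists, make it writable, remove it recursively, and log (rather than rethrow) any error. A sketch that satisfies those expectations; the logError parameter is an assumption, since the real cleaner presumably logs through its own logger.

// Sketch of removeDir() as constrained by the specs above.
import * as shell from 'shelljs';

function removeDirSketch(dir: string, logError: (...args: any[]) => void): void {
  try {
    if (shell.test('-d', dir)) {          // return early if the directory does not exist
      shell.chmod('-R', 'a+w', dir);      // make the directory and its content writable
      shell.rm('-rf', dir);               // then remove it recursively
    }
  } catch (err) {
    logError(`ERROR: Unable to remove '${dir}' due to:`, err);
  }
}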
  describe('removeUnnecessaryBuilds()', () => {
    let cleanerRemoveDirSpy: jasmine.Spy;

    beforeEach(() => {
      cleanerRemoveDirSpy = spyOn(cleaner, 'removeDir');
    });


    it('should log the number of existing builds and builds to be removed', () => {
      cleaner.removeUnnecessaryBuilds([1, 2, 3], [3, 4, 5, 6]);

      expect(loggerLogSpy).toHaveBeenCalledWith('Existing builds: 3');
      expect(loggerLogSpy).toHaveBeenCalledWith('Removing 2 build(s): 1, 2');
    });


    it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
      cleaner.removeUnnecessaryBuilds([1, 2, 3], []);

      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/2'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
    });


    it('should try removing hidden directories as well', () => {
      cleaner.removeUnnecessaryBuilds([1, 2, 3], []);

      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
    });


    it('should remove the builds that do not correspond to open PRs', () => {
      cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
      expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
      cleanerRemoveDirSpy.calls.reset();

      cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
      expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0);
      cleanerRemoveDirSpy.calls.reset();

      cleaner.removeUnnecessaryBuilds([1, 2, 3, 4], []);
      expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(8);
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/1'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/2'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/3'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize('/foo/bar/4'));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
      expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}4`));
      cleanerRemoveDirSpy.calls.reset();
    });

  });

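Taken together, these specs say removeUnnecessaryBuilds() logs the counts, keeps builds whose numbers are still open PRs, and removes both the visible and the hidden directory for every other build. A sketch with the collaborators passed in explicitly; in the real class they would presumably be fields such as buildsDir and the removeDir() method above.

// Sketch of the pruning logic asserted above; the HIDDEN_DIR_PREFIX value is an assumption.
import {join} from 'path';

const HIDDEN_DIR_PREFIX = 'hidden-';

function removeUnnecessaryBuildsSketch(
    existingBuildNumbers: number[],
    openPrNumbers: number[],
    buildsDir: string,
    removeDir: (dir: string) => void,
    log: (msg: string) => void,
): void {
  const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));

  log(`Existing builds: ${existingBuildNumbers.length}`);
  log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);

  // Each removed build may have both a public and a hidden (not yet verified) directory.
  toRemove.forEach(num => {
    removeDir(join(buildsDir, `${num}`));
    removeDir(join(buildsDir, `${HIDDEN_DIR_PREFIX}${num}`));
  });
}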
  describe('removeUnnecessaryDownloads()', () => {
    let shellRmSpy: jasmine.Spy;

    beforeEach(() => {
      shellRmSpy = spyOn(shell, 'rm');
    });


    it('should log the number of existing downloads and downloads to be removed', () => {
      cleaner.removeUnnecessaryDownloads(EXISTING_DOWNLOADS, OPEN_PRS);

      expect(loggerLogSpy).toHaveBeenCalledWith('Existing downloads: 4');
      expect(loggerLogSpy).toHaveBeenCalledWith('Removing 2 download(s): 20-ABCDEF0-build.zip, 20-1234567-build.zip');
    });


    it('should construct full paths to directories (by prepending \'downloadsDir\')', () => {
      cleaner.removeUnnecessaryDownloads(['dl-1', 'dl-2', 'dl-3'], []);

      expect(shellRmSpy).toHaveBeenCalledWith(normalize('/downloads/dl-1'));
      expect(shellRmSpy).toHaveBeenCalledWith(normalize('/downloads/dl-2'));
      expect(shellRmSpy).toHaveBeenCalledWith(normalize('/downloads/dl-3'));
    });


    it('should remove the downloads that do not correspond to open PRs', () => {
      cleaner.removeUnnecessaryDownloads(EXISTING_DOWNLOADS, OPEN_PRS);
      expect(shellRmSpy).toHaveBeenCalledTimes(2);
      expect(shellRmSpy).toHaveBeenCalledWith(normalize('/downloads/20-ABCDEF0-build.zip'));
      expect(shellRmSpy).toHaveBeenCalledWith(normalize('/downloads/20-1234567-build.zip'));
    });

  });
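removeUnnecessaryDownloads() follows the same pattern for archived artifacts. The sketch below assumes the PR number is the leading digits of the download file name (for example '20-ABCDEF0-build.zip'), which is consistent with, but not proven by, the fixtures above.

// Sketch only: pairing downloads with PRs via the leading digits of the file name is an assumption.
import {join} from 'path';
import * as shell from 'shelljs';

function removeUnnecessaryDownloadsSketch(
    existingDownloads: string[],
    openPrNumbers: number[],
    downloadsDir: string,
    log: (msg: string) => void,
): void {
  const toRemove = existingDownloads.filter(name => !openPrNumbers.includes(parseInt(name, 10)));

  log(`Existing downloads: ${existingDownloads.length}`);
  log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);

  toRemove.forEach(name => shell.rm(join(downloadsDir, name)));
}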
});
@ -1,108 +0,0 @@
import * as nock from 'nock';
import {CircleCiApi} from '../../lib/common/circle-ci-api';

const ORG = 'testorg';
const REPO = 'testrepo';
const TOKEN = 'xxxx';
const BASE_URL = `https://circleci.com/api/v1.1/project/github/${ORG}/${REPO}`;

describe('CircleCIApi', () => {
  describe('constructor()', () => {
    it('should throw if \'githubOrg\' is missing or empty', () => {
      expect(() => new CircleCiApi('', REPO, TOKEN)).
        toThrowError('Missing or empty required parameter \'githubOrg\'!');
    });

    it('should throw if \'githubRepo\' is missing or empty', () => {
      expect(() => new CircleCiApi(ORG, '', TOKEN)).
        toThrowError('Missing or empty required parameter \'githubRepo\'!');
    });

    it('should throw if \'circleCiToken\' is missing or empty', () => {
      expect(() => new CircleCiApi(ORG, REPO, '')).
        toThrowError('Missing or empty required parameter \'circleCiToken\'!');
    });
  });

  describe('getBuildInfo', () => {
    it('should make a request to the CircleCI API for the given build number', async () => {
      const api = new CircleCiApi(ORG, REPO, TOKEN);
      const buildNum = 12345;
      const expectedBuildInfo: any = { org: ORG, repo: REPO, build_num: buildNum };

      const request = nock(BASE_URL)
        .get(`/${buildNum}?circle-token=${TOKEN}`)
        .reply(200, expectedBuildInfo);

      const buildInfo = await api.getBuildInfo(buildNum);
      expect(buildInfo).toEqual(expectedBuildInfo);
      request.done();
    });

    it('should throw an error if the request fails', async () => {
      const api = new CircleCiApi(ORG, REPO, TOKEN);
      const buildNum = 12345;
      const errorMessage = 'Invalid request';
      const request = nock(BASE_URL).get(`/${buildNum}?circle-token=${TOKEN}`);

      request.replyWithError(errorMessage);
      await expectAsync(api.getBuildInfo(buildNum)).toBeRejectedWithError(
        `CircleCI build info request failed ` +
        `(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);

      request.reply(404, errorMessage);
      await expectAsync(api.getBuildInfo(buildNum)).toBeRejectedWithError(
        `CircleCI build info request failed ` +
        `(request to ${BASE_URL}/${buildNum}?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
    });
  });

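Both failure branches assert the same wrapped message, so getBuildInfo() evidently performs a single GET against the build URL and wraps any failure in a 'CircleCI build info request failed (...)' error. A sketch of that behaviour; the HTTP client (node-fetch) and the exact wrapping are assumptions, and the 'request to ... failed, reason: ...' text mirrors what such a client reports for network errors.

// Sketch of getBuildInfo(); library choice and error wrapping are assumptions.
import fetch from 'node-fetch';

async function getBuildInfoSketch(baseUrl: string, token: string, buildNum: number): Promise<any> {
  const url = `${baseUrl}/${buildNum}?circle-token=${token}`;
  try {
    const response = await fetch(url);
    if (response.status !== 200) {
      throw new Error(`request to ${url} failed, reason: ${await response.text()}`);
    }
    return await response.json();
  } catch (err: any) {
    throw new Error(`CircleCI build info request failed (${err.message})`);
  }
}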
  describe('getBuildArtifactUrl', () => {
    it('should make a request to the CircleCI API for the given build number', async () => {
      const api = new CircleCiApi(ORG, REPO, TOKEN);
      const buildNum = 12345;
      const artifact0: any = { path: 'some/path/0', url: 'https://url/0' };
      const artifact1: any = { path: 'some/path/1', url: 'https://url/1' };
      const artifact2: any = { path: 'some/path/2', url: 'https://url/2' };
      const request = nock(BASE_URL)
        .get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
        .reply(200, [artifact0, artifact1, artifact2]);

      await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeResolvedTo('https://url/1');
      request.done();
    });


    it('should throw an error if the request fails', async () => {
      const api = new CircleCiApi(ORG, REPO, TOKEN);
      const buildNum = 12345;
      const errorMessage = 'Invalid request';
      const request = nock(BASE_URL).get(`/${buildNum}/artifacts?circle-token=${TOKEN}`);

      request.replyWithError(errorMessage);
      await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeRejectedWithError(
        `CircleCI artifact URL request failed ` +
        `(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);

      request.reply(404, errorMessage);
      await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/1')).toBeRejectedWithError(
        `CircleCI artifact URL request failed ` +
        `(request to ${BASE_URL}/${buildNum}/artifacts?circle-token=${TOKEN} failed, reason: ${errorMessage})`);
    });

    it('should throw an error if the response does not contain the specified artifact', async () => {
      const api = new CircleCiApi(ORG, REPO, TOKEN);
      const buildNum = 12345;
      const artifact0: any = { path: 'some/path/0', url: 'https://url/0' };
      const artifact1: any = { path: 'some/path/1', url: 'https://url/1' };
      const artifact2: any = { path: 'some/path/2', url: 'https://url/2' };
      nock(BASE_URL)
        .get(`/${buildNum}/artifacts?circle-token=${TOKEN}`)
        .reply(200, [artifact0, artifact1, artifact2]);

      await expectAsync(api.getBuildArtifactUrl(buildNum, 'some/path/3')).toBeRejectedWithError(
        `CircleCI artifact URL request failed ` +
        `(Missing artifact (some/path/3) for CircleCI build: ${buildNum})`);
    });
  });
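getBuildArtifactUrl() adds one step on top of the previous call: fetch the build's artifact list and return the URL whose path matches, failing with a 'Missing artifact' message otherwise. A sketch under the same assumptions as the getBuildInfo sketch above; the Artifact shape is inferred from the fixtures.

// Sketch of getBuildArtifactUrl(); all names outside the asserted URLs and messages are assumptions.
import fetch from 'node-fetch';

interface Artifact { path: string; url: string; }

async function getBuildArtifactUrlSketch(
    baseUrl: string, token: string, buildNum: number, artifactPath: string): Promise<string> {
  try {
    const response = await fetch(`${baseUrl}/${buildNum}/artifacts?circle-token=${token}`);
    const artifacts = (await response.json()) as Artifact[];
    const artifact = artifacts.find(a => a.path === artifactPath);
    if (!artifact) {
      throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNum}`);
    }
    return artifact.url;
  } catch (err: any) {
    throw new Error(`CircleCI artifact URL request failed (${err.message})`);
  }
}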
});
Some files were not shown because too many files have changed in this diff.