Compare commits
2 Commits
9.0.7 ... 2.0.0-beta
Author | SHA1 | Date
--- | --- | ---
 | 94a3f99ba3 |
 | d29ef8fdaf |
.bazelignore (97 deletions)
@@ -1,97 +0,0 @@
# Bazel does not yet support wildcards or other .gitignore semantics for
# .bazelignore. Two issues for this feature request are outstanding:
# https://github.com/bazelbuild/bazel/issues/7093
# https://github.com/bazelbuild/bazel/issues/8106
.git
node_modules
dist
aio/content
aio/node_modules
aio/tools/examples/shared/node_modules
packages/bazel/node_modules
integration/bazel/bazel-bazel
integration/bazel/bazel-bin
integration/bazel/bazel-out
integration/bazel/bazel-testlogs
integration/bazel-schematics/demo
# All integration test node_modules folders
integration/bazel/node_modules
integration/bazel-schematics/node_modules
integration/cli-hello-world/node_modules
integration/cli-hello-world-ivy-compat/node_modules
integration/cli-hello-world-ivy-i18n/node_modules
integration/cli-hello-world-ivy-minimal/node_modules
integration/cli-hello-world-lazy/node_modules
integration/cli-hello-world-lazy-rollup/node_modules
integration/dynamic-compiler/node_modules
integration/hello_world__closure/node_modules
integration/hello_world__systemjs_umd/node_modules
integration/i18n/node_modules
integration/injectable-def/node_modules
integration/ivy-i18n/node_modules
integration/language_service_plugin/node_modules
integration/ng_elements/node_modules
integration/ng_elements_schematics/node_modules
integration/ng_update/node_modules
integration/ng_update_migrations/node_modules
integration/ngcc/node_modules
integration/platform-server/node_modules
integration/service-worker-schema/node_modules
integration/side-effects/node_modules
integration/terser/node_modules
integration/typings_test_ts36/node_modules
integration/typings_test_ts37/node_modules
# All integration test .yarn_local_cache folders
integration/bazel/.yarn_local_cache
integration/bazel-schematics/.yarn_local_cache
integration/cli-hello-world/.yarn_local_cache
integration/cli-hello-world-ivy-compat/.yarn_local_cache
integration/cli-hello-world-ivy-i18n/.yarn_local_cache
integration/cli-hello-world-ivy-minimal/.yarn_local_cache
integration/cli-hello-world-lazy/.yarn_local_cache
integration/cli-hello-world-lazy-rollup/.yarn_local_cache
integration/dynamic-compiler/.yarn_local_cache
integration/hello_world__closure/.yarn_local_cache
integration/hello_world__systemjs_umd/.yarn_local_cache
integration/i18n/.yarn_local_cache
integration/injectable-def/.yarn_local_cache
integration/ivy-i18n/.yarn_local_cache
integration/language_service_plugin/.yarn_local_cache
integration/ng_elements/.yarn_local_cache
integration/ng_elements_schematics/.yarn_local_cache
integration/ng_update/.yarn_local_cache
integration/ng_update_migrations/.yarn_local_cache
integration/ngcc/.yarn_local_cache
integration/platform-server/.yarn_local_cache
integration/service-worker-schema/.yarn_local_cache
integration/side-effects/.yarn_local_cache
integration/terser/.yarn_local_cache
integration/typings_test_ts36/.yarn_local_cache
integration/typings_test_ts37/.yarn_local_cache
# All integration test NPM_PACKAGE_MANIFEST.json folders
integration/bazel/NPM_PACKAGE_MANIFEST.json
integration/bazel-schematics/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-compat/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-minimal/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy-rollup/NPM_PACKAGE_MANIFEST.json
integration/dynamic-compiler/NPM_PACKAGE_MANIFEST.json
integration/hello_world__closure/NPM_PACKAGE_MANIFEST.json
integration/hello_world__systemjs_umd/NPM_PACKAGE_MANIFEST.json
integration/i18n/NPM_PACKAGE_MANIFEST.json
integration/injectable-def/NPM_PACKAGE_MANIFEST.json
integration/ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/language_service_plugin/NPM_PACKAGE_MANIFEST.json
integration/ng_elements/NPM_PACKAGE_MANIFEST.json
integration/ng_elements_schematics/NPM_PACKAGE_MANIFEST.json
integration/ng_update/NPM_PACKAGE_MANIFEST.json
integration/ng_update_migrations/NPM_PACKAGE_MANIFEST.json
integration/ngcc/NPM_PACKAGE_MANIFEST.json
integration/platform-server/NPM_PACKAGE_MANIFEST.json
integration/service-worker-schema/NPM_PACKAGE_MANIFEST.json
integration/side-effects/NPM_PACKAGE_MANIFEST.json
integration/terser/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts36/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts37/NPM_PACKAGE_MANIFEST.json
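Because `.bazelignore` does not support wildcards (see the two Bazel issues linked at the top of the file), every per-project path has to be listed explicitly. A minimal, purely illustrative sketch of how such a repetitive block could be regenerated with a shell loop; this script is not part of the repository, and the checked-in file lists its directories by hand rather than globbing:

```bash
#!/usr/bin/env bash
# Hypothetical helper: emit one ignore entry per integration test project.
# The suffixes mirror the three groups of entries in the .bazelignore above.
for dir in integration/*/; do
  for suffix in node_modules .yarn_local_cache NPM_PACKAGE_MANIFEST.json; do
    echo "${dir}${suffix}"
  done
done
```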
.bazelrc (150 deletions)
@@ -1,150 +0,0 @@
# Enable debugging tests with --config=debug
test:debug --test_arg=--node_options=--inspect-brk --test_output=streamed --test_strategy=exclusive --test_timeout=9999 --nocache_test_results

###############################
# Filesystem interactions     #
###############################

# Create symlinks in the project:
# - dist/bin for outputs
# - dist/testlogs, dist/genfiles
# - bazel-out
# NB: bazel-out should be excluded from the editor configuration.
# The checked-in /.vscode/settings.json does this for VSCode.
# Other editors may require manual config to ignore this directory.
# In the past, we saw a problem where VSCode traversed a massive tree, opening file handles and
# eventually a surprising failure with auto-discovery of the C++ toolchain in
# MacOS High Sierra.
# See https://github.com/bazelbuild/bazel/issues/4603
build --symlink_prefix=dist/

# Turn off legacy external runfiles
build --nolegacy_external_runfiles
run --nolegacy_external_runfiles
test --nolegacy_external_runfiles

# Turn on --incompatible_strict_action_env which was on by default
# in Bazel 0.21.0 but turned off again in 0.22.0. Follow
# https://github.com/bazelbuild/bazel/issues/7026 for more details.
# This flag is needed so that the bazel cache is not invalidated
# when running bazel via `yarn bazel`.
# See https://github.com/angular/angular/issues/27514.
build --incompatible_strict_action_env
run --incompatible_strict_action_env
test --incompatible_strict_action_env

###############################
# Release support             #
# Turn on these settings with #
# --config=release            #
###############################

# Releases should always be stamped with version control info
# This command assumes node on the path and is a workaround for
# https://github.com/bazelbuild/bazel/issues/4802
build:release --workspace_status_command="node ./tools/bazel_stamp_vars.js"
build:release --stamp

###############################
# Output                      #
###############################

# A more useful default output mode for bazel query
# Prints eg. "ng_module rule //foo:bar" rather than just "//foo:bar"
query --output=label_kind

# By default, failing tests don't print any output, it goes to the log file
test --test_output=errors

################################
# Settings for CircleCI        #
################################

# Bazel flags for CircleCI are in /.circleci/bazel.linux.rc and /.circleci/bazel.windows.rc

##################################
# Settings for integration tests #
##################################

# Trick bazel into treating BUILD files under integration/bazel as being regular files
# This lets us glob() up all the files inside this integration test to make them inputs to tests
# (Note, we cannot use common --deleted_packages because the bazel version command doesn't support it)
build --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e
query --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e

################################
# Temporary Settings for Ivy   #
################################
# To determine if the compiler used should be Ivy instead of ViewEngine, one can use `--config=ivy`
# on any bazel target. This is a temporary flag until the codebase is permanently switched to Ivy.
build --define=angular_ivy_enabled=False

build:view-engine --define=angular_ivy_enabled=False
build:ivy --define=angular_ivy_enabled=True

##################################
# Remote Build Execution support #
# Turn on these settings with    #
# --config=remote                #
##################################

# The following --define=EXECUTOR=remote will be able to be removed
# once https://github.com/bazelbuild/bazel/issues/7254 is fixed
build:remote --define=EXECUTOR=remote

# Set a higher timeout value, just in case.
build:remote --remote_timeout=600

# Increase the default number of jobs by 50% because our build has lots of
# parallelism
build:remote --jobs=150
build:remote --google_default_credentials

# Force remote executions to consider the entire run as linux
build:remote --cpu=k8
build:remote --host_cpu=k8

# Toolchain and platform related flags
build:remote --host_javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --crosstool_top=@rbe_ubuntu1604_angular//cc:toolchain
build:remote --extra_toolchains=@rbe_ubuntu1604_angular//config:cc-toolchain
build:remote --extra_execution_platforms=//tools:rbe_ubuntu1604-angular
build:remote --host_platform=//tools:rbe_ubuntu1604-angular
build:remote --platforms=//tools:rbe_ubuntu1604-angular

# Remote instance and caching
build:remote --remote_instance_name=projects/internal-200822/instances/default_instance
build:remote --project_id=internal-200822
build:remote --remote_cache=remotebuildexecution.googleapis.com
build:remote --remote_executor=remotebuildexecution.googleapis.com

##################################
# Saucelabs tests settings       #
# Turn on these settings with    #
# --config=saucelabs             #
##################################

# For saucelabs tests we don't want to enable flaky test attempts. Karma has its own integrated
# retry mechanism and we do not want to retry unnecessarily if Karma already tried multiple times.
test:saucelabs --flaky_test_attempts=1

###############################
# NodeJS rules settings
# These settings are required for rules_nodejs
###############################

# Turn on managed directories feature in Bazel
# This allows us to avoid installing a second copy of node_modules
common --experimental_allow_incremental_repository_updates

####################################################
# User bazel configuration
# NOTE: This needs to be the *last* entry in the config.
####################################################

# Load any settings which are specific to the current user. Needs to be *last* statement
# in this config, as the user configuration should be able to overwrite flags from this file.
try-import .bazelrc.user
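The named config groups above (`debug`, `release`, `ivy`, `view-engine`, `remote`, `saucelabs`) are selected on the command line with `--config=<name>`. A hedged sketch of typical invocations, assuming the `yarn bazel` wrapper mentioned in the comments; the specific target labels are illustrative, not taken from this diff:

```bash
# Run tests with the Ivy compiler settings from build:ivy.
yarn bazel test //packages/core/... --config=ivy

# Debug a single test target with the test:debug flags (inspect-brk, streamed output).
yarn bazel test //packages/core/test:test --config=debug

# Build with Remote Build Execution and stamped release info.
yarn bazel build //packages/... --config=remote --config=release
```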
@@ -1,19 +0,0 @@
# Encryption

Based on https://github.com/circleci/encrypted-files

In the CircleCI web UI, we have a secret variable called `KEY`
https://circleci.com/gh/angular/angular/edit#env-vars
which is only exposed to non-fork builds
(see "Pass secrets to builds from forked pull requests" under
https://circleci.com/gh/angular/angular/edit#advanced-settings)

We use this as a symmetric AES encryption key to encrypt tokens like
a GitHub token that enables publishing snapshots.

To create the github_token file, we take this approach:
- Find the angular-builds:token in http://valentine
- Go inside the CircleCI default docker image so you use the same version of openssl as we will at runtime: `docker run --rm -it circleci/node:10.12`
- echo "https://[token]:@github.com" > credentials
- openssl aes-256-cbc -e -in credentials -out .circleci/github_token -k $KEY
- If needed, base64-encode the result so you can copy-paste it out of docker: `base64 github_token`
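For reference, the matching decryption step appears in the `publish_snapshot` job of the CircleCI config later in this diff; a minimal sketch of that step, with the `-md md5` digest and output path taken from that job:

```bash
# Decrypt the checked-in token with the same symmetric KEY (as done in publish_snapshot).
openssl aes-256-cbc -d -in .circleci/github_token -md md5 -k "${KEY}" -out ~/.git_credentials
```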
@@ -1,15 +0,0 @@
# Settings in this file should be OS agnostic. Use the bazel.<OS>.rc files for OS specific settings.

# Don't be spammy in the logs
build --noshow_progress

# Print all the options that apply to the build.
# This helps us diagnose which options override others
# (e.g. /etc/bazel.bazelrc vs. tools/bazel.rc)
build --announce_rc

# Retry in the event of flakes, eg. https://circleci.com/gh/angular/angular/31309
test --flaky_test_attempts=2

# More details on failures
build --verbose_failures=true
@@ -1,21 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to /etc/bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=/home/circleci/bazel_repository_cache

# Workaround https://github.com/bazelbuild/bazel/issues/3645
# Bazel doesn't calculate the memory ceiling correctly when running under Docker.
# Limit Bazel to consuming resources that fit in the CircleCI "xlarge" class
# https://circleci.com/docs/2.0/configuration-reference/#resource_class
build --local_resources=14336,8.0,1.0

# All builds executed remotely should be done using our RBE configuration.
build:remote --google_default_credentials
build --config=remote
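As the header comment above says, CI activates these flags by copying the file into Bazel's system-wide rc location before any build runs. A minimal sketch of that step, assuming this is the `.circleci/bazel.linux.rc` file referenced from `.bazelrc` above; the actual CI script that performs the copy is not part of this diff:

```bash
# On the Linux CI image, make the flags above apply to every bazel invocation.
sudo cp .circleci/bazel.linux.rc /etc/bazel.bazelrc
```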
@@ -1,17 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to $env:ProgramData\bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=C:/Users/circleci/bazel_repository_cache

# All windows jobs run on master and should use http caching
build --remote_http_cache=https://storage.googleapis.com/angular-team-cache
build --remote_accept_cached=true
build --remote_upload_local_results=true
build --google_default_credentials
@@ -1,905 +0,0 @@
# Configuration file for https://circleci.com/gh/angular/angular

# Note: YAML anchors allow an object to be re-used, reducing duplication.
# The ampersand declares an alias for an object, then later the `<<: *name`
# syntax dereferences it.
# See http://blog.daemonl.com/2016/02/yaml.html
# To validate changes, use an online parser, eg.
# http://yaml-online-parser.appspot.com/

# CircleCI configuration version
# Version 2.1 allows for extra config reuse features
# https://circleci.com/docs/2.0/reusing-config/#getting-started-with-config-reuse
version: 2.1

# We don't want to include the current branch name in the cache key because that would prevent
# PRs from being able to restore the cache since the branch names are always different for PRs.
# The cache key should only consist of dynamic values that change whenever something in the
# cache changes. For example:
# 1) yarn lock file changes --> cached "node_modules" are different.
# 2) bazel repository definitions change --> cached bazel repositories are different.
# Windows needs its own cache key because binaries in node_modules are different.
# **NOTE 1**: If you change the cache key prefix, also sync the cache_key_fallback to match.
# **NOTE 2**: Keep the static part of the cache key as prefix to enable correct fallbacks.
# See https://circleci.com/docs/2.0/caching/#restoring-cache for how prefixes work in CircleCI.
var_3: &cache_key v4-angular-node-12-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4: &cache_key_fallback v4-angular-node-12-
var_3_win: &cache_key_win v5-angular-win-node-12-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4_win: &cache_key_win_fallback v5-angular-win-node-12-

# Cache key for the `components-repo-unit-tests` job. **Note** when updating the SHA in the
# cache keys also update the SHA for the "COMPONENTS_REPO_COMMIT" environment variable.
var_5: &components_repo_unit_tests_cache_key v5-angular-components-598db096e668aa7e9debd56eedfd127b7a55e371
var_6: &components_repo_unit_tests_cache_key_fallback v5-angular-components-

# Workspace initially persisted by the `setup` job, and then enhanced by `build-npm-packages` and
# `build-ivy-npm-packages`.
# https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
# https://circleci.com/blog/deep-diving-into-circleci-workspaces/
var_7: &workspace_location ~/

# Filter to run a job on builds for pull requests only.
var_8: &only_on_pull_requests
  filters:
    branches:
      only:
        - /pull\/\d+/

# Filter to skip a job on builds for pull requests.
var_9: &skip_on_pull_requests
  filters:
    branches:
      ignore:
        - /pull\/\d+/

# Filter to run a job on builds for the master branch only.
var_10: &only_on_master
  filters:
    branches:
      only:
        - master

# Executor Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-executors
# **NOTE 1**: Pin to exact images using an ID (SHA). See https://circleci.com/docs/2.0/circleci-images/#using-a-docker-image-id-to-pin-an-image-to-a-fixed-version.
#             (Using the tag is not necessary when pinning by ID, but include it anyway for documentation purposes.)
# **NOTE 2**: If you change the version of the docker images, also change the `cache_key` suffix.
# **NOTE 3**: If you change the version of the `*-browsers` docker image, make sure the
#             `--versions.chrome` arg in `integration/bazel-schematics/test.sh` specifies a
#             ChromeDriver version that is compatible with the Chrome version in the image.
executors:
  default-executor:
    parameters:
      resource_class:
        type: string
        default: medium
    docker:
      - image: circleci/node:12.14.1@sha256:f9de24fc0017059cc42ef7d07db060008af65a98b1f0cdd1ef3339213226bf6d
    resource_class: << parameters.resource_class >>
    working_directory: ~/ng

  windows-executor:
    working_directory: ~/ng
    resource_class: windows.medium
    # CircleCI windows VMs do have the GitBash shell available:
    # https://github.com/CircleCI-Public/windows-preview-docs#shells
    # But in this specific case we really should not use it because Bazel must not be run from
    # GitBash. These issues discuss why:
    # https://github.com/bazelbuild/bazel/issues/5751
    # https://github.com/bazelbuild/bazel/issues/5724#issuecomment-410194038
    # https://github.com/bazelbuild/bazel/issues/6339#issuecomment-441600879
    shell: powershell.exe -ExecutionPolicy Bypass
    machine:
      # Windows preview image that includes the following:
      # - Visual Studio 2019 build tools
      # - Node 12
      # - yarn 1.17
      # - Python 3.7.4
      image: windows-server-2019-vs2019:201908-02

# Command Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-commands
commands:
  custom_attach_workspace:
    description: Attach workspace at a predefined location
    steps:
      - attach_workspace:
          at: *workspace_location

  # Install shared libs used by Chrome that are either provisioned by
  # rules_webtesting or by puppeteer.
  install_chrome_libs:
    description: Install shared Chrome libs
    steps:
      - run:
          name: Install shared Chrome libs
          command: |
            sudo apt-get update
            # Install GTK+ graphical user interface (libgtk-3-0), advanced linux sound architecture (libasound2)
            # and network security service libraries (libnss3) & X11 Screen Saver extension library (libxss1)
            # which are dependencies of chrome & needed for karma & protractor headless chrome tests.
            # This is a very small install which takes around 7s compared to using the full
            # circleci/node:x.x.x-browsers image.
            sudo apt-get -y install libgtk-3-0 libasound2 libnss3 libxss1

  # Install java runtime which is required by some integration tests such as
  # //integration:hello_world__closure_test, //integration:i18n_test and
  # //integration:ng_elements_test to run the closure compiler
  install_java:
    description: Install java
    steps:
      - run:
          name: Install java
          command: |
            sudo apt-get update
            # Install java runtime
            sudo apt-get install default-jre

  # Initializes the CI environment by setting up common environment variables.
  init_environment:
    description: Initializing environment (setting up variables)
    steps:
      - run:
          name: Set up environment
          environment:
            CIRCLE_GIT_BASE_REVISION: << pipeline.git.base_revision >>
            CIRCLE_GIT_REVISION: << pipeline.git.revision >>
          command: ./.circleci/env.sh
      - run:
          # Configure git as the CircleCI `checkout` command does.
          # This is needed because we only checkout on the setup job.
          # Add GitHub to known hosts
          name: Configure git
          command: |
            mkdir -p ~/.ssh
            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' >> ~/.ssh/known_hosts
            git config --global url."ssh://git@github.com".insteadOf "https://github.com" || true
            git config --global gc.auto 0 || true

  init_saucelabs_environment:
    description: Sets up a domain that resolves to the local host.
    steps:
      - run:
          name: Preparing environment for running tests on Saucelabs.
          command: |
            # For SauceLabs jobs, we set up a domain which resolves to the machine which launched
            # the tunnel. We do this because devices are sometimes not able to properly resolve
            # `localhost` or `127.0.0.1` through the SauceLabs tunnel. Using a domain that does not
            # resolve to anything on SauceLabs VMs ensures that such requests are always resolved
            # through the tunnel, and resolve to the actual tunnel host machine (i.e. the CircleCI VM).
            # More context can be found in: https://github.com/angular/angular/pull/35171.
            setPublicVar SAUCE_LOCALHOST_ALIAS_DOMAIN "angular-ci.local"
            setSecretVar SAUCE_ACCESS_KEY $(echo $SAUCE_ACCESS_KEY | rev)
      - run:
          # Sets up a local domain in the machine's host file that resolves to the local
          # host. This domain is helpful in Saucelabs tests where devices are not able to
          # properly resolve `localhost` or `127.0.0.1` through the sauce-connect tunnel.
          name: Setting up alias domain for local host.
          command: echo "127.0.0.1 $SAUCE_LOCALHOST_ALIAS_DOMAIN" | sudo tee -a /etc/hosts

  # Normally this would be an individual job instead of a command.
  # But startup and setup time for each individual windows job are high enough to discourage
  # many small jobs, so instead we use a command for setup unless the gain becomes significant.
  setup_win:
    description: Setup windows node environment
    steps:
      # Use the Linux workspace directly, as it already has checkout, rebased and node modules.
      - custom_attach_workspace
      # Install Bazel pre-requisites that aren't in the preconfigured CircleCI Windows VM.
      - run: ./.circleci/windows-env.ps1
      - run: node --version
      - run: yarn --version
      - restore_cache:
          keys:
            - *cache_key_win
            - *cache_key_win_fallback
      # Reinstall to get windows binaries.
      - run: yarn install --frozen-lockfile --non-interactive
      # Install @bazel/bazel globally and use that for the first run.
      # Workaround for https://github.com/bazelbuild/rules_nodejs/issues/894
      - run: yarn global add @bazel/bazel@$env:BAZEL_VERSION
      - run: bazel info

  notify_webhook_on_fail:
    description: Notify a webhook about failure
    parameters:
      # `webhook_url_env_var` names a secret env var defined in CircleCI project settings.
      # The URLs come from https://angular-team.slack.com/apps/A0F7VRE7N-circleci.
      webhook_url_env_var:
        type: env_var_name
    steps:
      - run:
          when: on_fail
          command: |
            notificationJson="{\"text\":\":x: \`$CIRCLE_JOB\` job for $CIRCLE_BRANCH branch failed on build $CIRCLE_BUILD_NUM: $CIRCLE_BUILD_URL :scream:\"}"
            curl --request POST --header "Content-Type: application/json" --data "$notificationJson" ${<< parameters.webhook_url_env_var >>}

# Job definitions
# Jobs can include parameters that are passed in the workflow job invocation.
# https://circleci.com/docs/2.0/reusing-config/#authoring-parameterized-jobs
jobs:
  setup:
    executor: default-executor
    steps:
      - checkout
      - run:
          name: Rebase PR on target branch
          # After checkout, rebase on top of target branch.
          command: >
            if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
              # User is required for rebase.
              git config user.name "angular-ci"
              git config user.email "angular-ci"
              # Rebase PR on top of target branch.
              node tools/rebase-pr.js angular/angular ${CIRCLE_PR_NUMBER}
            else
              echo "This build is not over a PR, nothing to do."
            fi
      # This cache is saved in the build-npm-packages so that Bazel cache is also included.
      - restore_cache:
          keys:
            - *cache_key
            - *cache_key_fallback
      - init_environment
      - run:
          name: Running Yarn install
          command: yarn install --frozen-lockfile --non-interactive
          # Yarn's requests sometimes take more than 10mins to complete.
          no_output_timeout: 45m
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      # Make the bazel directories and add a file to them if they don't exist already so that
      # persist_to_workspace does not fail.
      - run: |
          if [ ! -d ~/bazel_repository_cache ]; then
            mkdir ~/bazel_repository_cache
            touch ~/bazel_repository_cache/MARKER
          fi
      # Persist any changes at this point to be reused by further jobs.
      # **NOTE**: To add new content to the workspace, always persist on the same root.
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ./ng
            - ./bazel_repository_cache

  lint:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment

      - run: 'yarn bazel:format -mode=check ||
              (echo "BUILD files not formatted. Please run ''yarn bazel:format''" ; exit 1)'
      # Run the skylark linter to check our Bazel rules
      - run: 'yarn bazel:lint ||
              (echo -e "\n.bzl files have lint errors. Please run ''yarn bazel:lint-fix''"; exit 1)'

      - run: yarn lint
      - run: yarn ts-circular-deps:check
      - run: node tools/pullapprove/verify.js

  test:
    executor:
      name: default-executor
      # Now that large integration tests are running locally in parallel (they can't run on RBE yet
      # as they require network access for yarn install), this test is running out of memory
      # consistently with the xlarge machine.
      # TODO: switch back to xlarge once integration tests are running on remote-exec
      resource_class: 2xlarge+
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - install_java
      - run:
          command: yarn bazel test //... --build_tag_filters=-ivy-only --test_tag_filters=-ivy-only
          no_output_timeout: 20m

  # Temporary job to test what will happen when we flip the Ivy flag to true
  test_ivy_aot:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # We need to explicitly specify the --symlink_prefix option because otherwise we would
      # not be able to easily find the output bin directory when uploading artifacts for size
      # measurements.
      - run:
          command: yarn test-ivy-aot //... --symlink_prefix=dist/
          no_output_timeout: 20m

      # Publish bundle artifacts which will be used to calculate the size change. **Note**: Make
      # sure that the size plugin from the Angular robot fetches the artifacts from this CircleCI
      # job (see .github/angular-robot.yml). Additionally any artifacts need to be stored with the
      # following path format: "{projectName}/{context}/{fileName}". This format is necessary
      # because otherwise the bot is not able to pick up the artifacts from CircleCI. See:
      # https://github.com/angular/github-robot/blob/master/functions/src/plugins/size.ts#L392-L394
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/hello_world/bundle.min.js
          destination: core/hello_world/bundle
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/todo/bundle.min.js
          destination: core/todo/bundle
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/hello_world/bundle.min.js.br
          destination: core/hello_world/bundle.br
      - store_artifacts:
          path: dist/bin/packages/core/test/bundling/todo/bundle.min.js.br
          destination: core/todo/bundle.br

  # NOTE: This is currently limited to master builds only. See the `monitoring` configuration.
  saucelabs_view_engine:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Run Bazel tests on Saucelabs with ViewEngine
          # See /tools/saucelabs/README.md for more info
          command: |
            yarn bazel run //tools/saucelabs:sauce_service_setup
            TESTS=$(./node_modules/.bin/bazelisk query --output label '(kind(karma_web_test, ...) intersect attr("tags", "saucelabs", ...)) except attr("tags", "ivy-only", ...) except attr("tags", "fixme-saucelabs-ve", ...)')
            yarn bazel test --config=saucelabs ${TESTS}
            yarn bazel run //tools/saucelabs:sauce_service_stop
          no_output_timeout: 40m
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  # NOTE: This is currently limited to master builds only. See the `monitoring` configuration.
  saucelabs_ivy:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Run Bazel tests on Saucelabs with Ivy
          # See /tools/saucelabs/README.md for more info
          command: |
            yarn bazel run //tools/saucelabs:sauce_service_setup
            TESTS=$(./node_modules/.bin/bazelisk query --output label '(kind(karma_web_test, ...) intersect attr("tags", "saucelabs", ...)) except attr("tags", "no-ivy-aot", ...) except attr("tags", "fixme-saucelabs-ivy", ...)')
            yarn bazel test --config=saucelabs --config=ivy ${TESTS}
            yarn bazel run //tools/saucelabs:sauce_service_stop
          no_output_timeout: 40m
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  test_aio:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Compile dependencies to ivy
      # Running `ngcc` here (instead of implicitly via `ng build`) allows us to take advantage of
      # the parallel, async mode speed-up (~20-25s on CI).
      - run: yarn --cwd aio ngcc --properties es2015
      # Build aio
      - run: yarn --cwd aio build --progress=false
      # Lint the code
      - run: yarn --cwd aio lint
      # Run unit tests
      - run: yarn --cwd aio test --progress=false --watch=false
      # Run e2e tests
      - run: yarn --cwd aio e2e --configuration=ci
      # Run PWA-score tests
      - run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE
      # Run accessibility tests
      - run: yarn --cwd aio test-a11y-score-localhost
      # Check the bundle sizes.
      - run: yarn --cwd aio payload-size
      # Run unit tests for Firebase redirects
      - run: yarn --cwd aio redirects-test

  deploy_aio:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Deploy angular.io to production (if necessary)
      - run: setPublicVar_CI_STABLE_BRANCH
      - run: yarn --cwd aio deploy-production

  test_aio_local:
    parameters:
      viewengine:
        type: boolean
        default: false
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Build aio (with local Angular packages)
      - run: yarn --cwd aio build-local<<# parameters.viewengine >>-with-viewengine<</ parameters.viewengine >>-ci
      # Run unit tests
      - run: yarn --cwd aio test --progress=false --watch=false
      # Run e2e tests
      - run: yarn --cwd aio e2e --configuration=ci
      # Run PWA-score tests
      - run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE
      # Check the bundle sizes.
      - run: yarn --cwd aio payload-size aio-local<<# parameters.viewengine >>-viewengine<</ parameters.viewengine >>

  test_aio_tools:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      # Install
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      - run: yarn --cwd aio extract-cli-command-docs
      # Run tools tests
      - run: yarn --cwd aio tools-test
      - run: ./aio/aio-builds-setup/scripts/test.sh

  test_docs_examples:
    parameters:
      ivy:
        type: boolean
        default: false
    executor:
      name: default-executor
      resource_class: xlarge
    parallelism: 5
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      # Install aio
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      # Run examples tests. The "CIRCLE_NODE_INDEX" will be set if "parallelism" is enabled.
      # Since the parallelism is set to "5", there will be five parallel CircleCI containers
      # with either "0", "1", etc as node index. This can be passed to the "--shard" argument.
      - run: yarn --cwd aio example-e2e --setup --local <<# parameters.ivy >>--ivy<</ parameters.ivy >> --cliSpecsConcurrency=5 --shard=${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL} --retry 2

  # This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
  aio_preview:
    executor: default-executor
    environment:
      AIO_SNAPSHOT_ARTIFACT_PATH: &aio_preview_artifact_path 'aio/tmp/snapshot.tgz'
    steps:
      - custom_attach_workspace
      - init_environment
      - run: ./aio/scripts/build-artifacts.sh $AIO_SNAPSHOT_ARTIFACT_PATH $CI_PULL_REQUEST $CI_COMMIT
      - store_artifacts:
          path: *aio_preview_artifact_path
          # The `destination` needs to be kept in sync with the value of
          # `AIO_ARTIFACT_PATH` in `aio/aio-builds-setup/Dockerfile`
          destination: aio/dist/aio-snapshot.tgz
      - run: node ./aio/scripts/create-preview $CIRCLE_BUILD_NUM

  # This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
  test_aio_preview:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run: yarn --cwd aio install --frozen-lockfile --non-interactive
      - run:
          name: Wait for preview and run tests
          command: node aio/scripts/test-preview.js $CI_PULL_REQUEST $CI_COMMIT $CI_AIO_MIN_PWA_SCORE


  # The `build-npm-packages` tasks exist for backwards-compatibility with old scripts and
  # tests that rely on the pre-Bazel `dist/packages-dist` output structure (build.sh).
  # Having multiple jobs that independently build in this manner duplicates some work; we build
  # the bazel packages more than once. Even though we have a remote cache, these jobs will
  # typically run in parallel so up-to-date outputs will not be available at the time the build
  # starts.

  # Build the view engine npm packages. No new jobs should depend on this.
  build-npm-packages:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - run: node scripts/build/build-packages-dist.js

      # Save the npm packages from //packages/... for other workflow jobs to read
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ng/dist/packages-dist
            - ng/dist/zone.js-dist

      # Save dependencies and bazel repository cache to use on subsequent runs.
      - save_cache:
          key: *cache_key
          paths:
            - "node_modules"
            - "aio/node_modules"
            - "~/bazel_repository_cache"

  # Build the ivy npm packages.
  build-ivy-npm-packages:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - run: node scripts/build/build-ivy-npm-packages.js

      # Save the npm packages from //packages/... for other workflow jobs to read
      - persist_to_workspace:
          root: *workspace_location
          paths:
            - ng/dist/packages-dist-ivy-aot
            - ng/dist/zone.js-dist-ivy-aot
  # This job creates compressed tarballs (`.tgz` files) for all Angular packages and stores them as
  # build artifacts. This makes it easy to try out changes from a PR build for testing purposes.
  # More info on CircleCI build artifacts: https://circleci.com/docs/2.0/artifacts
  #
  # NOTE: Currently, this job only runs for PR builds. See `publish_snapshot` for non-PR builds.
  publish_packages_as_artifacts:
    executor: default-executor
    environment:
      NG_PACKAGES_DIR: &ng_packages_dir 'dist/packages-dist'
      NG_PACKAGES_ARCHIVES_DIR: &ng_packages_archives_dir 'dist/packages-dist-archives'
      ZONEJS_PACKAGES_DIR: &zonejs_packages_dir 'dist/zone.js-dist'
      ZONEJS_PACKAGES_ARCHIVES_DIR: &zonejs_packages_archives_dir 'dist/zone.js-dist-archives'
    steps:
      - custom_attach_workspace
      - init_environment
      # Publish `@angular/*` packages.
      - run:
          name: Create artifacts for @angular/* packages
          command: ./scripts/ci/create-package-archives.sh $CI_BRANCH $CI_COMMIT $NG_PACKAGES_DIR $NG_PACKAGES_ARCHIVES_DIR
      - store_artifacts:
          path: *ng_packages_archives_dir
          destination: angular
      # Publish `zone.js` package.
      - run:
          name: Create artifacts for zone.js package
          command: ./scripts/ci/create-package-archives.sh $CI_BRANCH $CI_COMMIT $ZONEJS_PACKAGES_DIR $ZONEJS_PACKAGES_ARCHIVES_DIR
      - store_artifacts:
          path: *zonejs_packages_archives_dir
          destination: zone.js

  # This job updates the content of repos like github.com/angular/core-builds
  # for every green build on angular/angular.
  publish_snapshot:
    executor: default-executor
    steps:
      # See below - ideally this job should not trigger for non-upstream builds.
      # But since it does, we have to check this condition.
      - run:
          name: Skip this job for Pull Requests and Fork builds
          # Note: Using `CIRCLE_*` env variables (instead of those defined in `env.sh`) so that this
          # step can be run before `init_environment`.
          command: >
            if [[ -n "${CIRCLE_PR_NUMBER}" ]] ||
               [[ "$CIRCLE_PROJECT_USERNAME" != "angular" ]] ||
               [[ "$CIRCLE_PROJECT_REPONAME" != "angular" ]]; then
              circleci step halt
            fi
      - custom_attach_workspace
      - init_environment
      # CircleCI has a config setting to force SSH for all github connections
      # This is not compatible with our mechanism of using a Personal Access Token
      # Clear the global setting
      - run: git config --global --unset "url.ssh://git@github.com.insteadof"
      - run:
          name: Decrypt github credentials
          # We need to ensure that the same default digest is used for encoding and decoding with
          # openssl. OpenSSL versions might have different default digests which can cause
          # decryption failures based on the installed openssl version. https://stackoverflow.com/a/39641378/4317734
          command: 'openssl aes-256-cbc -d -in .circleci/github_token -md md5 -k "${KEY}" -out ~/.git_credentials'
      - run: ./scripts/ci/publish-build-artifacts.sh

  aio_monitoring_stable:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run: setPublicVar_CI_STABLE_BRANCH
      - run:
          name: Check out `aio/` and yarn from the stable branch
          command: |
            git fetch origin $CI_STABLE_BRANCH
            git checkout --force origin/$CI_STABLE_BRANCH -- aio/ .yarn/ .yarnrc
      # Ignore yarn's engines check, because we checked out `aio/package.json` from the stable
      # branch and there could be a node version skew, which is acceptable in this monitoring job.
      - run: yarn config set ignore-engines true
      - run:
          name: Run tests against https://angular.io/
          command: ./aio/scripts/test-production.sh https://angular.io/ $CI_AIO_MIN_PWA_SCORE
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_CARETAKER_WEBHOOK_URL
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  aio_monitoring_next:
    executor: default-executor
    steps:
      - custom_attach_workspace
      - init_environment
      - install_chrome_libs
      - run:
          name: Run tests against https://next.angular.io/
          command: ./aio/scripts/test-production.sh https://next.angular.io/ $CI_AIO_MIN_PWA_SCORE
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_CARETAKER_WEBHOOK_URL
      - notify_webhook_on_fail:
          webhook_url_env_var: SLACK_DEV_INFRA_CI_FAILURES_WEBHOOK_URL

  legacy-unit-tests-saucelabs:
    executor:
      name: default-executor
      # In order to avoid the bottleneck of having a slow host machine, we acquire a better
      # container for this job. This is necessary because we launch a lot of browsers concurrently
      # and therefore the tunnel and Karma need to process a lot of file requests and tests.
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      - init_saucelabs_environment
      - run:
          name: Starting Saucelabs tunnel service
          command: ./tools/saucelabs/sauce-service.sh run
          background: true
      - run: yarn tsc -p packages
      - run: yarn tsc -p modules
      - run: yarn bazel build //packages/zone.js:npm_package
      - run:
          # Waiting on ready ensures that we don't run tests before the Saucelabs tunnel is ready.
          name: Waiting for Saucelabs tunnel to connect
          command: ./tools/saucelabs/sauce-service.sh ready-wait
      - run:
          name: Running tests on Saucelabs.
          command: |
            browsers=$(node -e 'console.log(require("./browser-providers.conf").sauceAliases.CI_REQUIRED.join(","))')
            yarn karma start ./karma-js.conf.js --single-run --browsers=${browsers}
      - run:
          name: Stop Saucelabs tunnel service
          command: ./tools/saucelabs/sauce-service.sh stop

  # Job that runs all unit tests of the `angular/components` repository.
  components-repo-unit-tests:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      # Restore the cache before cloning the repository because the clone script re-uses
      # the restored repository if present. This reduces the amount of times the components
      # repository needs to be cloned (this is slow and increases based on commits in the repo).
      - restore_cache:
          keys:
            - *components_repo_unit_tests_cache_key
            # Whenever the `angular/components` SHA is updated, the cache key will no longer
            # match. The fallback cache will still match, and CircleCI will restore the most
            # recently cached repository folder. Without the fallback cache, we'd need to download
            # the repository from scratch and it would slow down the job. This is because we can't
            # clone the repository with reduced `--depth`, but rather need to clone the whole
            # repository to be able to support arbitrary SHAs.
            - *components_repo_unit_tests_cache_key_fallback
      - run:
          name: "Fetching angular/components repository"
          command: ./scripts/ci/clone_angular_components_repo.sh
      - run:
          # Run yarn install to fetch the Bazel binaries as used in the components repo.
          name: Installing dependencies.
          # TODO: remove this once the repo has been updated to use NodeJS v12 and Yarn 1.19.1.
          # We temporarily ignore the "engines" because the Angular components repository has
          # a minimum dependency on NodeJS v12 and Yarn 1.19.1, but the framework repository uses
          # older versions.
          command: yarn --ignore-engines --cwd ${COMPONENTS_REPO_TMP_DIR} install --frozen-lockfile --non-interactive
      - save_cache:
          key: *components_repo_unit_tests_cache_key
          paths:
            # Temporary directory must be kept in sync with the `$COMPONENTS_REPO_TMP_DIR` env
            # variable. It needs to be hardcoded here, because env variables interpolation is
            # not supported.
            - "/tmp/angular-components-repo"
      - run:
          # Updates the `angular/components` `package.json` file to refer to the release output
          # inside the `packages-dist` directory. Note that it's not necessary to perform a yarn
          # install as Bazel runs Yarn automatically when needed.
          name: Setting up release packages.
          command: node scripts/ci/update-deps-to-dist-packages.js ${COMPONENTS_REPO_TMP_DIR}/package.json dist/packages-dist/
      - run:
          name: "Running `angular/components` unit tests"
          command: ./scripts/ci/run_angular_components_unit_tests.sh

  test_zonejs:
    executor:
      name: default-executor
      resource_class: xlarge
    steps:
      - custom_attach_workspace
      - init_environment
      # Install
      - run: yarn --cwd packages/zone.js install --frozen-lockfile --non-interactive
      # Run zone.js tools tests
      - run: yarn --cwd packages/zone.js promisetest
      - run: yarn --cwd packages/zone.js promisefinallytest
      - run: yarn bazel build //packages/zone.js:npm_package &&
             cp dist/bin/packages/zone.js/npm_package/dist/zone-mix.js ./packages/zone.js/test/extra/ &&
             cp dist/bin/packages/zone.js/npm_package/dist/zone-patch-electron.js ./packages/zone.js/test/extra/ &&
             yarn --cwd packages/zone.js electrontest

  # Windows jobs
  # Docs: https://circleci.com/docs/2.0/hello-world-windows/
  test_win:
    executor: windows-executor
    steps:
      - setup_win
      - run:
          # Ran into a command parsing problem where `-browser:chromium-local` was converted to
          # `-browser: chromium-local` (a space was added) in https://circleci.com/gh/angular/angular/357511.
          # Probably a powershell command parsing thing. There's no problem using a yarn script though.
          command: yarn circleci-win-ve
          no_output_timeout: 45m
      # Save bazel repository cache to use on subsequent runs.
      # We don't save node_modules because it's faster to use the linux workspace and reinstall.
      - save_cache:
          key: *cache_key_win
          paths:
            - "C:/Users/circleci/bazel_repository_cache"

  test_ivy_aot_win:
    executor: windows-executor
    steps:
      - setup_win
      - run:
          command: yarn circleci-win-ivy
          no_output_timeout: 45m


workflows:
  version: 2
  default_workflow:
    jobs:
      - setup:
          filters:
            branches:
              ignore: g3
      - lint:
          requires:
            - setup
      - test:
          requires:
            - setup
      - test_ivy_aot:
          requires:
            - setup
      - build-npm-packages:
          requires:
            - setup
      - build-ivy-npm-packages:
          requires:
            - setup
      - legacy-unit-tests-saucelabs:
          requires:
            - setup
      - test_aio:
          requires:
            - setup
      - deploy_aio:
          requires:
            - test_aio
      - test_aio_local:
          requires:
            - build-npm-packages
      - test_aio_local:
          name: test_aio_local_viewengine
          viewengine: true
          requires:
            - build-npm-packages
      - test_aio_tools:
          requires:
            - build-npm-packages
      - test_docs_examples:
          requires:
            - build-npm-packages
      - test_docs_examples:
          name: test_docs_examples_ivy
          ivy: true
          requires:
            - build-npm-packages
      - aio_preview:
          # Only run on PR builds. (There can be no previews for non-PR builds.)
          <<: *only_on_pull_requests
          requires:
            - setup
      - test_aio_preview:
          requires:
            - aio_preview
      - publish_packages_as_artifacts:
          requires:
            - build-npm-packages
      - publish_snapshot:
          # Note: no filters on this job because we want it to run for all upstream branches
          # We'd really like to filter out pull requests here, but not yet available:
          # https://discuss.circleci.com/t/workflows-pull-request-filter/14396/4
          # Instead, the job just exits immediately at the first step.
          requires:
            # Only publish if tests and integration tests pass
            - test
            - test_ivy_aot
            # Only publish if `aio`/`docs` tests using the locally built Angular packages pass
            - test_aio_local
            - test_aio_local_viewengine
            - test_docs_examples
            - test_docs_examples_ivy
            # Get the artifacts to publish from the build-packages-dist job
            # since the publishing script expects the legacy outputs layout.
            - build-npm-packages
            - build-ivy-npm-packages
            - legacy-unit-tests-saucelabs
      - components-repo-unit-tests:
          requires:
            - build-npm-packages
      - test_zonejs:
          requires:
            - setup
      # Windows Jobs
      # These are very slow so we run them on non-PRs only for now.
      # TODO: remove the filter when CircleCI makes Windows FS faster.
      # The Windows jobs are only run after their non-windows counterparts finish successfully.
      # This isn't strictly necessary as there is no artifact dependency, but helps economize
      # CI resources by not attempting to build when we know it should fail.
      - test_win:
          <<: *skip_on_pull_requests
          requires:
            - test
      - test_ivy_aot_win:
          <<: *skip_on_pull_requests
          requires:
            - test_ivy_aot

  monitoring:
    jobs:
      - setup
      - aio_monitoring_stable:
          requires:
            - setup
      - aio_monitoring_next:
          requires:
            - setup
      - saucelabs_ivy:
          # Testing saucelabs via Bazel is currently taking longer than the legacy saucelabs job, as
          # each karma_web_test target is provisioning and tearing down browsers, which adds
          # a lot of overhead. Running once daily on master only to avoid wasting resources and
          # slowing down CI for PRs.
          # TODO: Run this job on all branches (including PRs) once karma_web_test targets can
          # share provisioned browsers and we can remove the legacy saucelabs job.
          requires:
            - setup
      - saucelabs_view_engine:
          # Testing saucelabs via Bazel is currently taking longer than the legacy saucelabs job, as
          # each karma_web_test target is provisioning and tearing down browsers, which adds
          # a lot of overhead. Running once daily on master only to avoid wasting resources and
          # slowing down CI for PRs.
          # TODO: Run this job on all branches (including PRs) once karma_web_test targets can
          # share provisioned browsers and we can remove the legacy saucelabs job.
          requires:
            - setup
    triggers:
      - schedule:
          <<: *only_on_master
          # Runs monitoring jobs at 10:00AM every day.
          cron: "0 10 * * *"
@ -1,73 +0,0 @@
|
||||
####################################################################################################
|
||||
# Helpers for defining environment variables for CircleCI.
|
||||
#
|
||||
# In CircleCI, each step runs in a new shell. The way to share ENV variables across steps is to
|
||||
# export them from `$BASH_ENV`, which is automatically sourced at the beginning of every step (for
|
||||
# the default `bash` shell).
|
||||
#
|
||||
# See also https://circleci.com/docs/2.0/env-vars/#using-bash_env-to-set-environment-variables.
|
||||
####################################################################################################
|
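# For illustration only (hypothetical `FOO` variable, not part of this setup): a value
# exported to `$BASH_ENV` in one step is visible in later steps, because `$BASH_ENV` is
# sourced before each step runs. E.g.:
# ```sh
# # Step 1: persist a variable for later steps (append, don't overwrite).
# echo "export FOO=\"bar\";" >> $BASH_ENV;
#
# # Step 2 (a new shell): `$BASH_ENV` has already been sourced, so `FOO` is set.
# echo "FOO is: $FOO";  # --> "FOO is: bar"
# ```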
||||
|
||||
# Set and print an environment variable.
|
||||
#
|
||||
# Use this function for setting environment variables that are public, i.e. it is OK for them to be
|
||||
# visible to anyone through the CI logs.
|
||||
#
|
||||
# Usage: `setPublicVar <name> <value>`
|
||||
function setPublicVar() {
|
||||
setSecretVar $1 "$2";
|
||||
echo "$1=$2";
|
||||
}
|
||||
|
||||
# Set (without printing) an environment variable.
|
||||
#
|
||||
# Use this function for setting environment variables that are secret, i.e. should not be visible to
|
||||
# everyone through the CI logs.
|
||||
#
|
||||
# Usage: `setSecretVar <name> <value>`
|
||||
function setSecretVar() {
|
||||
# WARNING: Secrets (e.g. passwords, access tokens) should NOT be printed.
|
||||
# (Keep original shell options to restore at the end.)
|
||||
local -r originalShellOptions=$(set +o);
|
||||
set +x -eu -o pipefail;
|
||||
|
||||
echo "export $1=\"${2:-}\";" >> $BASH_ENV;
|
||||
|
||||
# Restore original shell options.
|
||||
eval "$originalShellOptions";
|
||||
}
|
||||
|
||||
|
||||
# Create a function that, when called, sets an environment variable.
|
||||
#
|
||||
# Use this function for creating setters for public environment variables that require expensive or
|
||||
# time-consuming computations and may not be needed. When needed, you can call this function to set
|
||||
# the environment variable (which will be available through `$BASH_ENV` from that point onwards).
|
||||
#
|
||||
# Arguments:
|
||||
# - `<name>`: The name of the environment variable. The generated setter function will be
|
||||
# `setPublicVar_<name>`.
|
||||
# - `<code>`: The code to run to compute the value for the variable. Since this code should be
|
||||
# executed lazily, it must be properly escaped. For example:
|
||||
# ```sh
|
||||
# # DO NOT do this:
|
||||
# createPublicVarSetter MY_VAR "$(whoami)"; # `whoami` will be evaluated eagerly
|
||||
#
|
||||
# # DO this instead:
|
||||
# createPublicVarSetter MY_VAR "\$(whoami)"; # `whoami` will NOT be evaluated eagerly
|
||||
# ```
|
||||
#
|
||||
# Usage: `createPublicVarSetter <name> <code>`
|
||||
#
|
||||
# Example:
|
||||
# ```sh
|
||||
# createPublicVarSetter MY_VAR 'echo "FOO"';
|
||||
# echo $MY_VAR; # Not defined
|
||||
#
|
||||
# setPublicVar_MY_VAR;
|
||||
# source $BASH_ENV;
|
||||
# echo $MY_VAR; # FOO
|
||||
# ```
|
||||
function createPublicVarSetter() {
|
||||
echo "setPublicVar_$1() { setPublicVar $1 \"$2\"; }" >> $BASH_ENV;
|
||||
}
|
107
.circleci/env.sh
107
.circleci/env.sh
@ -1,107 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Variables
|
||||
readonly projectDir=$(realpath "$(dirname ${BASH_SOURCE[0]})/..")
|
||||
readonly envHelpersPath="$projectDir/.circleci/env-helpers.inc.sh";
|
||||
|
||||
# Load helpers and make them available everywhere (through `$BASH_ENV`).
|
||||
source $envHelpersPath;
|
||||
echo "source $envHelpersPath;" >> $BASH_ENV;
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define PUBLIC environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
# See https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables for more info.
|
||||
####################################################################################################
|
||||
setPublicVar PROJECT_ROOT "$projectDir";
|
||||
setPublicVar CI_AIO_MIN_PWA_SCORE "95";
|
||||
# This is the branch being built; e.g. `pull/12345` for PR builds.
|
||||
setPublicVar CI_BRANCH "$CIRCLE_BRANCH";
|
||||
setPublicVar CI_BUILD_URL "$CIRCLE_BUILD_URL";
|
||||
setPublicVar CI_COMMIT "$CIRCLE_SHA1";
|
||||
# `CI_COMMIT_RANGE` is only used on push builds (a.k.a. non-PR, non-scheduled builds and rerun
|
||||
# workflows of such builds).
|
||||
setPublicVar CI_COMMIT_RANGE "$CIRCLE_GIT_BASE_REVISION..$CIRCLE_GIT_REVISION";
|
||||
setPublicVar CI_PULL_REQUEST "${CIRCLE_PR_NUMBER:-false}";
|
||||
setPublicVar CI_REPO_NAME "$CIRCLE_PROJECT_REPONAME";
|
||||
setPublicVar CI_REPO_OWNER "$CIRCLE_PROJECT_USERNAME";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define "lazy" PUBLIC environment variables for CircleCI.
|
||||
# (I.e. functions to set an environment variable when called.)
|
||||
####################################################################################################
|
||||
createPublicVarSetter CI_STABLE_BRANCH "\$(npm info @angular/core dist-tags.latest | sed -r 's/^\\s*([0-9]+\\.[0-9]+)\\.[0-9]+.*$/\\1.x/')";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define SECRET environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
setSecretVar CI_SECRET_AIO_DEPLOY_FIREBASE_TOKEN "$AIO_DEPLOY_TOKEN";
|
||||
setSecretVar CI_SECRET_PAYLOAD_FIREBASE_TOKEN "$ANGULAR_PAYLOAD_TOKEN";
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define SauceLabs environment variables for CircleCI.
|
||||
####################################################################################################
|
||||
setPublicVar SAUCE_USERNAME "angular-framework";
|
||||
setSecretVar SAUCE_ACCESS_KEY "0c731274ed5f-cbc9-16f4-021a-9835e39f";
|
||||
# TODO(josephperrott): Remove these environment variables once all saucelabs tests run via the Bazel method.
|
||||
setPublicVar SAUCE_LOG_FILE /tmp/angular/sauce-connect.log
|
||||
setPublicVar SAUCE_READY_FILE /tmp/angular/sauce-connect-ready-file.lock
|
||||
setPublicVar SAUCE_PID_FILE /tmp/angular/sauce-connect-pid-file.lock
|
||||
setPublicVar SAUCE_TUNNEL_IDENTIFIER "angular-framework-${CIRCLE_BUILD_NUM}-${CIRCLE_NODE_INDEX}"
|
||||
# Number of seconds we wait for sauceconnect to establish a tunnel instance. In order to not
|
||||
# occupy CircleCI instances for too long if sauceconnect fails, we need a connect timeout.
|
||||
setPublicVar SAUCE_READY_FILE_TIMEOUT 120
|
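# The script that actually waits for the tunnel is not part of this file; for illustration
# only, a ready-file wait honoring this timeout could look roughly like:
# ```sh
# waited=0;
# while [[ ! -f "$SAUCE_READY_FILE" ]]; do
#   if (( waited >= SAUCE_READY_FILE_TIMEOUT )); then
#     echo "Timed out waiting for the sauce-connect tunnel." >&2;
#     exit 1;
#   fi
#   sleep 1;
#   waited=$((waited + 1));
# done
# ```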
||||
|
||||
|
||||
####################################################################################################
|
||||
# Define environment variables for the `angular/components` repo unit tests job.
|
||||
####################################################################################################
|
||||
# We specifically use a directory within "/tmp" here because we want the cloned repo to be
|
||||
# completely isolated from angular/angular in order to avoid any bad interactions between
|
||||
# their separate build setups. **NOTE**: When updating the temporary directory, also update
|
||||
# the `save_cache` path configuration in `config.yml`
|
||||
setPublicVar COMPONENTS_REPO_TMP_DIR "/tmp/angular-components-repo"
|
||||
setPublicVar COMPONENTS_REPO_URL "https://github.com/angular/components.git"
|
||||
setPublicVar COMPONENTS_REPO_BRANCH "master"
|
||||
# **NOTE**: When updating the commit SHA, also update the cache key in the CircleCI `config.yml`.
|
||||
setPublicVar COMPONENTS_REPO_COMMIT "598db096e668aa7e9debd56eedfd127b7a55e371"
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Decrypt GCP Credentials and store them as the Google default credentials.
|
||||
####################################################################################################
|
||||
mkdir -p "$HOME/.config/gcloud";
|
||||
openssl aes-256-cbc -d -in "${projectDir}/.circleci/gcp_token" \
|
||||
-md md5 -k "$CIRCLE_PROJECT_REPONAME" -out "$HOME/.config/gcloud/application_default_credentials.json"
|
||||
####################################################################################################
|
||||
# Set bazel configuration for CircleCI runs.
|
||||
####################################################################################################
|
||||
cp "${projectDir}/.circleci/bazel.linux.rc" "$HOME/.bazelrc";
|
||||
|
||||
####################################################################################################
|
||||
# Create shell script in /tmp for Bazel actions to access CI envs without
|
||||
# busting the cache. Used by payload-size.sh script in integration tests.
|
||||
####################################################################################################
|
||||
readonly bazelVarEnv="/tmp/bazel-ci-env.sh"
|
||||
echo "# Setup by /.circleci/env.sh" > $bazelVarEnv
|
||||
echo "export PROJECT_ROOT=\"${PROJECT_ROOT}\";" >> $bazelVarEnv
|
||||
echo "export CI_BRANCH=\"${CI_BRANCH}\";" >> $bazelVarEnv
|
||||
echo "export CI_BUILD_URL=\"${CI_BUILD_URL}\";" >> $bazelVarEnv
|
||||
echo "export CI_COMMIT=\"${CI_COMMIT}\";" >> $bazelVarEnv
|
||||
echo "export CI_COMMIT_RANGE=\"${CI_COMMIT_RANGE}\";" >> $bazelVarEnv
|
||||
echo "export CI_PULL_REQUEST=\"${CI_PULL_REQUEST}\";" >> $bazelVarEnv
|
||||
echo "export CI_REPO_NAME=\"${CI_REPO_NAME}\";" >> $bazelVarEnv
|
||||
echo "export CI_REPO_OWNER=\"${CI_REPO_OWNER}\";" >> $bazelVarEnv
|
||||
echo "export CI_SECRET_PAYLOAD_FIREBASE_TOKEN=\"${CI_SECRET_PAYLOAD_FIREBASE_TOKEN}\";" >> $bazelVarEnv
|
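# For illustration only, a consumer (such as the payload-size script mentioned above) could
# pick these values up by sourcing the generated file, without the values being declared as
# Bazel action inputs (and therefore without busting the cache):
# ```sh
# if [[ -f /tmp/bazel-ci-env.sh ]]; then
#   source /tmp/bazel-ci-env.sh;
# fi
# echo "Building commit ${CI_COMMIT:-unknown} on branch ${CI_BRANCH:-unknown}";
# ```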
||||
|
||||
####################################################################################################
|
||||
####################################################################################################
|
||||
## Source `$BASH_ENV` to make the variables available immediately. ##
|
||||
## ***NOTE: This must remain the last action in this script*** ##
|
||||
####################################################################################################
|
||||
####################################################################################################
|
||||
source $BASH_ENV;
|
Binary file not shown.
Binary file not shown.
@ -1,11 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Install bazel remote cache proxy
|
||||
# This is temporary until the feature is no longer experimental on CircleCI.
|
||||
# See remote cache documentation in /docs/BAZEL.md
|
||||
|
||||
set -u -e
|
||||
|
||||
readonly DOWNLOAD_URL="https://5-116431813-gh.circle-artifacts.com/0/pkg/bazel-remote-proxy-$(uname -s)_$(uname -m)"
|
||||
|
||||
curl --fail -o ~/bazel-remote-proxy "$DOWNLOAD_URL"
|
||||
chmod +x ~/bazel-remote-proxy
|
@ -1,107 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Usage (cli):
|
||||
* ```
|
||||
* node create-preview <build-number> <job-name> <webhook-url>
|
||||
* ```
|
||||
*
|
||||
* Usage (JS):
|
||||
* ```js
|
||||
* require('./trigger-webhook').
|
||||
* triggerWebhook(buildNumber, jobName, webhookUrl).
|
||||
* then(...);
|
||||
* ```
|
||||
*
|
||||
* Triggers a notification webhook with CircleCI specific info.
|
||||
*
|
||||
* It can be used for notifying external servers and trigger operations based on CircleCI job status
|
||||
 * (e.g. triggering the creation of a preview based on previously stored build artifacts).
|
||||
*
|
||||
* The body of the sent payload is of the form:
|
||||
* ```json
|
||||
* {
|
||||
* "payload": {
|
||||
* "build_num": ${buildNumber}
|
||||
* "build_parameters": {
|
||||
* "CIRCLE_JOB": "${jobName}"
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* When used from JS, it returns a promise which resolves to an object of the form:
|
||||
* ```json
|
||||
* {
|
||||
 *   "statusCode": ${statusCode},
|
||||
* "responseText": "${responseText}"
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* NOTE:
|
||||
* - When used from the cli, the command will exit with an error code if the response's status code
|
||||
* is outside the [200, 400) range.
|
||||
* - When used from JS, the returned promise will be resolved, even if the response's status code is
|
||||
* outside the [200, 400) range. It is up to the caller to decide how this should be handled.
|
||||
*/
|
||||
|
||||
// Imports
|
||||
const {request} = require('https');
|
||||
|
||||
// Exports
|
||||
module.exports = {
|
||||
triggerWebhook,
|
||||
};
|
||||
|
||||
// Run
|
||||
if (require.main === module) {
|
||||
_main(process.argv.slice(2));
|
||||
}
|
||||
|
||||
// Helpers
|
||||
function _main(args) {
|
||||
triggerWebhook(...args).
|
||||
then(({statusCode, responseText}) => (200 <= statusCode && statusCode < 400) ?
|
||||
console.log(`Status: ${statusCode}\n${responseText}`) :
|
||||
Promise.reject(new Error(`Request failed (status: ${statusCode}): ${responseText}`))).
|
||||
catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
function postJson(url, data) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const opts = {method: 'post', headers: {'Content-Type': 'application/json'}};
|
||||
const onResponse = res => {
|
||||
const statusCode = res.statusCode || -1;
|
||||
let responseText = '';
|
||||
|
||||
res.
|
||||
on('error', reject).
|
||||
on('data', d => responseText += d).
|
||||
on('end', () => resolve({statusCode, responseText}));
|
||||
};
|
||||
|
||||
request(url, opts, onResponse).
|
||||
on('error', reject).
|
||||
end(JSON.stringify(data));
|
||||
});
|
||||
}
|
||||
|
||||
async function triggerWebhook(buildNumber, jobName, webhookUrl) {
|
||||
if (!buildNumber || !jobName || !webhookUrl || isNaN(buildNumber)) {
|
||||
throw new Error(
|
||||
'Missing or invalid arguments.\n' +
|
||||
'Expected: buildNumber (number), jobName (string), webhookUrl (string)');
|
||||
}
|
||||
|
||||
const data = {
|
||||
payload: {
|
||||
build_num: +buildNumber,
|
||||
build_parameters: {CIRCLE_JOB: jobName},
|
||||
},
|
||||
};
|
||||
|
||||
return postJson(webhookUrl, data);
|
||||
}
|
@ -1,56 +0,0 @@
|
||||
# Install Bazel pre-reqs on Windows
|
||||
# https://docs.bazel.build/versions/master/install-windows.html
|
||||
# https://docs.bazel.build/versions/master/windows.html
|
||||
# Install MSYS2 and packages
|
||||
choco install msys2 --version 20180531.0.0 --no-progress --package-parameters "/NoUpdate"
|
||||
C:\tools\msys64\usr\bin\bash.exe -l -c "pacman --needed --noconfirm -S zip unzip patch diffutils git"
|
||||
|
||||
# Add PATH modifications to the Powershell profile. This is the win equivalent of .bash_profile.
|
||||
# https://docs.microsoft.com/en-us/previous-versions//bb613488(v=vs.85)
|
||||
new-item -path $profile -itemtype file -force
|
||||
# Paths for nodejs, npm, yarn, and msys2. Use single quotes to prevent interpolation.
|
||||
# Add before the original path to use msys2 instead of the installed gitbash.
|
||||
Add-Content $profile '$Env:path = "${Env:ProgramFiles}\nodejs\;C:\Users\circleci\AppData\Roaming\npm\;${Env:ProgramFiles(x86)}\Yarn\bin\;C:\Users\circleci\AppData\Local\Yarn\bin\;C:\tools\msys64\usr\bin\;" + $Env:path'
|
||||
# Environment variables for Bazel
|
||||
Add-Content $profile '$Env:BAZEL_SH = "C:\tools\msys64\usr\bin\bash.exe"'
|
||||
|
||||
# Get the bazel version devdep and store it in a global var for use in the circleci job.
|
||||
$bazelVersion = & ${Env:ProgramFiles}\nodejs\node.exe -e "console.log(require('./package.json').devDependencies['@bazel/bazel'])"
|
||||
# This is a tricky situation: we want $bazelVersion to be evaluated but not $Env:BAZEL_VERSION.
|
||||
# Using the -f format operator works: https://stackoverflow.com/questions/32127583/expand-variable-inside-single-quotes
|
||||
$bazelVersionGlobalVar = '$Env:BAZEL_VERSION = "{0}"' -f $bazelVersion
|
||||
Add-Content $profile $bazelVersionGlobalVar
|
||||
|
||||
# Remove the CircleCI checkout SSH override, because it breaks cloning repositories through Bazel.
|
||||
# See https://circleci.com/gh/angular/angular/401454 for an example.
|
||||
# TODO: is this really needed? Maybe there's a better way. It doesn't happen on Linux or on Codefresh.
|
||||
git config --global --unset url.ssh://git@github.com.insteadOf
|
||||
|
||||
|
||||
####################################################################################################
|
||||
# Decrypt GCP Credentials and store them as the Google default credentials.
|
||||
####################################################################################################
|
||||
mkdir ${env:APPDATA}\gcloud
|
||||
openssl aes-256-cbc -d -in .circleci\gcp_token -md md5 -out "$env:APPDATA\gcloud\application_default_credentials.json" -k "$env:CIRCLE_PROJECT_REPONAME"
|
||||
|
||||
####################################################################################################
|
||||
# Set bazel configuration for CircleCI runs.
|
||||
####################################################################################################
|
||||
copy .circleci\bazel.windows.rc ${Env:USERPROFILE}\.bazelrc
|
||||
|
||||
####################################################################################################
|
||||
# Install specific version of node.
|
||||
####################################################################################################
|
||||
choco install nodejs --version 12.14.1 --no-progress
|
||||
|
||||
# These Bazel prereqs aren't needed because the CircleCI image already includes them.
|
||||
# choco install yarn --version 1.16.0 --no-progress
|
||||
# choco install vcredist2015 --version 14.0.24215.20170201
|
||||
|
||||
# We don't need VS Build Tools for the tested bazel targets.
|
||||
# If it's needed again, uncomment these lines.
|
||||
# VS Build Tools are needed for Bazel C++ targets (like com_google_protobuf)
|
||||
# choco install visualstudio2019buildtools --version 16.1.2.0 --no-progress --package-parameters "--add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --add Microsoft.Component.VC.Runtime.UCRTSDK --add Microsoft.VisualStudio.Component.Windows10SDK.17763"
|
||||
# Add-Content $profile '$Env:BAZEL_VC = "${Env:ProgramFiles(x86)}\Microsoft Visual Studio\2019\BuildTools\VC\"'
|
||||
# Python is needed for Bazel Python targets
|
||||
# choco install python --version 3.5.1 --no-progress
|
@ -1,31 +0,0 @@
|
||||
# VSCode Remote Development - Developing inside a Container
|
||||
|
||||
This folder contains configuration files that can be used to opt into working on this repository in a [Docker container](https://www.docker.com/resources/what-container) via [VSCode](https://code.visualstudio.com/)'s Remote Development feature (see below).
|
||||
|
||||
Info on remote development and developing inside a container with VSCode:
|
||||
- [VSCode: Remote Development](https://code.visualstudio.com/docs/remote/remote-overview)
|
||||
- [VSCode: Developing inside a Container](https://code.visualstudio.com/docs/remote/containers)
|
||||
- [VSCode: Remote Development FAQ](https://code.visualstudio.com/docs/remote/faq)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
_Prerequisite: [Install Docker](https://docs.docker.com/install) on your local environment._
|
||||
|
||||
To get started, read and follow the instructions in [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The [.devcontainer/](.) directory contains pre-configured `devcontainer.json` and `Dockerfile` files, which you can use to set up remote development with a Docker container.
|
||||
|
||||
In a nutshell, you need to:
|
||||
- Install the [Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension.
|
||||
- Copy [recommended-Dockerfile](./recommended-Dockerfile) to `Dockerfile` (and optionally tweak to suit your needs), as shown in the sketch after this list.
|
||||
- Copy [recommended-devcontainer.json](./recommended-devcontainer.json) to `devcontainer.json` (and optionally tweak to suit your needs).
|
||||
- Open VSCode and bring up the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).
|
||||
- Type `Remote-Containers: Open Folder in Container` and choose your local clone of [angular/angular](https://github.com/angular/angular).
|
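For illustration, the copy steps above could look like this when run from the repository root (a sketch only; adjust the paths if you run the commands from elsewhere):

```sh
# Create your local (git-ignored) config files from the recommended templates,
# then tweak them as needed.
cp .devcontainer/recommended-Dockerfile        .devcontainer/Dockerfile
cp .devcontainer/recommended-devcontainer.json .devcontainer/devcontainer.json
```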
||||
|
||||
The `.devcontainer/devcontainer.json` and `.devcontainer/Dockerfile` files are ignored by git, so you can have your own local versions. We may occasionally update the template files ([recommended-devcontainer.json](./recommended-devcontainer.json), [recommended-Dockerfile](./recommended-Dockerfile)), in which case you will need to manually update your local copies (if desired).
|
||||
|
||||
|
||||
## Updating `recommended-devcontainer.json` and `recommended-Dockerfile`
|
||||
|
||||
You can update and commit the recommended config files (which people use as basis for their local configs), if you find that something is broken, out-of-date or can be improved.
|
||||
|
||||
Please, keep in mind that any changes you make will potentially be used by many people on different environments. Try to keep these config files cross-platform compatible and free of personal preferences.
|
@ -1,24 +0,0 @@
|
||||
# Image metadata and config.
|
||||
FROM circleci/node:10-browsers
# Ideally, the image version should be what we use on CI.
|
||||
# See `executors > browsers-executor` in `.circleci/config.yml`.
|
||||
|
||||
LABEL name="Angular dev environment" \
|
||||
description="This image can be used to create a dev environment for building Angular." \
|
||||
vendor="angular" \
|
||||
version="1.0"
|
||||
|
||||
EXPOSE 4000 4200 4433 5000 8080 9876
|
||||
|
||||
|
||||
# Switch to `root` (CircleCI images use `circleci` as the user).
|
||||
USER root
|
||||
|
||||
|
||||
# Configure `Node.js`/`npm` and install utilities.
|
||||
RUN npm config --global set user root
|
||||
RUN npm install --global yarn@latest # Ideally, the version should be what we use on CI.
|
||||
# See `commands > overwrite_yarn` in `.circleci/config.yml`.
|
||||
|
||||
|
||||
# Go! (And keep going.)
|
||||
CMD ["tail", "--follow", "/dev/null"]
|
@ -1,16 +0,0 @@
|
||||
// Reference: https://code.visualstudio.com/docs/remote/containers#_devcontainerjson-reference
|
||||
{
|
||||
"name": "Angular dev container",
|
||||
"dockerFile": "Dockerfile",
|
||||
"appPort": [4000, 4200, 4433, 5000, 8080, 9876],
|
||||
"postCreateCommand": "yarn install",
|
||||
"extensions": [
|
||||
"devondcarew.bazel-code",
|
||||
"gkalpak.aio-docs-utils",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
||||
"xaver.clang-format",
|
||||
// The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
|
||||
//"angular.ng-template",
|
||||
//"dbaeumer.vscode-eslint",
|
||||
],
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
# https://editorconfig.org
|
||||
# http://editorconfig.org
|
||||
|
||||
root = true
|
||||
|
||||
|
9
.gitattributes
vendored
9
.gitattributes
vendored
@ -1,12 +1,5 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text=auto
|
||||
|
||||
# JS and TS files must always use LF for tools to work
|
||||
# JS files must always use LF for tools to work
|
||||
*.js eol=lf
|
||||
*.ts eol=lf
|
||||
|
||||
# API guardian patch must always use LF for tests to work
|
||||
*.patch eol=lf
|
||||
|
||||
# Must keep Windows line ending to be parsed correctly
|
||||
scripts/windows/packages.txt eol=crlf
|
||||
|
40
.github/ISSUE_TEMPLATE.md
vendored
40
.github/ISSUE_TEMPLATE.md
vendored
@ -1,10 +1,38 @@
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
**Note: for support questions, please use one of these channels:** https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question. This repository's issues are reserved for feature requests and bug reports.
|
||||
|
||||
Please help us process issues more efficiently by filing an
|
||||
issue using one of the following templates:
|
||||
* **I'm submitting a ... **
|
||||
[ ] bug report
|
||||
[ ] feature request
|
||||
[ ] support request => Please do not submit support request here, see note at the top of this template.
|
||||
|
||||
https://github.com/angular/angular/issues/new/choose
|
||||
|
||||
Thank you!
|
||||
* **Do you want to request a *feature* or report a *bug*?**
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
|
||||
* **What is the current behavior?**
|
||||
|
||||
|
||||
|
||||
* **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** via
|
||||
https://plnkr.co or similar (you can use this template as a starting point: http://plnkr.co/edit/tpl:AvJOMERrnz94ekVua0u5).
|
||||
|
||||
|
||||
|
||||
* **What is the expected behavior?**
|
||||
|
||||
|
||||
|
||||
* **What is the motivation / use case for changing the behavior?**
|
||||
|
||||
|
||||
|
||||
* **Please tell us about your environment:**
|
||||
|
||||
- Angular version: 2.0.0-beta.X
|
||||
- Browser: [all | Chrome XX | Firefox XX | IE XX | Safari XX | Mobile Chrome XX | Android X.X Web Browser | iOS XX Safari | iOS XX UIWebView | iOS XX WKWebView ]
|
||||
- Language: [all | TypeScript X.X | ES6/7 | ES5 | Dart]
|
||||
|
||||
|
||||
|
||||
* **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow, gitter, etc)
|
||||
|
69
.github/ISSUE_TEMPLATE/1-bug-report.md
vendored
69
.github/ISSUE_TEMPLATE/1-bug-report.md
vendored
@ -1,69 +0,0 @@
|
||||
---
|
||||
name: "\U0001F41EBug report"
|
||||
about: Report a bug in the Angular Framework
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
|
||||
# 🐞 bug report
|
||||
|
||||
### Affected Package
|
||||
<!-- Can you pin-point one or more @angular/* packages as the source of the bug? -->
|
||||
<!-- ✍️edit: --> The issue is caused by package @angular/....
|
||||
|
||||
|
||||
### Is this a regression?
|
||||
|
||||
<!-- Did this behavior use to work in the previous version? -->
|
||||
<!-- ✍️--> Yes, the previous version in which this bug was not present was: ....
|
||||
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️--> A clear and concise description of the problem...
|
||||
|
||||
|
||||
## 🔬 Minimal Reproduction
|
||||
<!--
|
||||
Please create and share minimal reproduction of the issue starting with this template: https://stackblitz.com/fork/angular-issue-repro2
|
||||
-->
|
||||
<!-- ✍️--> https://stackblitz.com/...
|
||||
|
||||
<!--
|
||||
If StackBlitz is not suitable for reproduction of your issue, please create a minimal GitHub repository with the reproduction of the issue.
|
||||
A good way to make a minimal reproduction is to create a new app via `ng new repro-app` and add the minimum possible code to show the problem.
|
||||
Share the link to the repo below along with step-by-step instructions to reproduce the problem, as well as expected and actual behavior.
|
||||
|
||||
Issues that don't have enough info and can't be reproduced will be closed.
|
||||
|
||||
You can read more about issue submission guidelines here: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#-submitting-an-issue
|
||||
-->
|
||||
|
||||
## 🔥 Exception or Error
|
||||
<pre><code>
|
||||
<!-- If the issue is accompanied by an exception or an error, please share it below: -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
## 🌍 Your Environment
|
||||
|
||||
**Angular Version:**
|
||||
<pre><code>
|
||||
<!-- run `ng version` and paste output below -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
**Anything else relevant?**
|
||||
<!-- ✍️Is this a browser specific issue? If so, please specify the browser and version. -->
|
||||
|
||||
<!-- ✍️Do any of these matter: operating system, IDE, package manager, HTTP server, ...? If so, please mention it below. -->
|
32
.github/ISSUE_TEMPLATE/2-feature-request.md
vendored
32
.github/ISSUE_TEMPLATE/2-feature-request.md
vendored
@ -1,32 +0,0 @@
|
||||
---
|
||||
name: "\U0001F680Feature request"
|
||||
about: Suggest a feature for Angular Framework
|
||||
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
|
||||
# 🚀 feature request
|
||||
|
||||
### Relevant Package
|
||||
<!-- Can you pin-point one or more @angular/* packages that are relevant for this feature request? -->
|
||||
<!-- ✍️edit: --> This feature request is for @angular/....
|
||||
|
||||
|
||||
### Description
|
||||
<!-- ✍️--> A clear and concise description of the problem or missing capability...
|
||||
|
||||
|
||||
### Describe the solution you'd like
|
||||
<!-- ✍️--> If you have a solution in mind, please describe it.
|
||||
|
||||
|
||||
### Describe alternatives you've considered
|
||||
<!-- ✍️--> Have you considered any alternative solutions or workarounds?
|
55
.github/ISSUE_TEMPLATE/3-docs-bug.md
vendored
55
.github/ISSUE_TEMPLATE/3-docs-bug.md
vendored
@ -1,55 +0,0 @@
|
||||
---
|
||||
name: "📚 Docs or angular.io issue report"
|
||||
about: Report an issue in Angular's documentation or angular.io application
|
||||
|
||||
---
|
||||
<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅
|
||||
|
||||
Oh hi there! 😄
|
||||
|
||||
To expedite issue processing please search open and closed issues before submitting a new one.
|
||||
Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
|
||||
🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->
|
||||
|
||||
# 📚 Docs or angular.io bug report
|
||||
|
||||
### Description
|
||||
|
||||
<!-- ✍️edit:--> A clear and concise description of the problem...
|
||||
|
||||
|
||||
## 🔬 Minimal Reproduction
|
||||
|
||||
### What's the affected URL?
|
||||
<!-- ✍️edit:--> https://angular.io/...
|
||||
|
||||
### Reproduction Steps
|
||||
<!-- If applicable please list the steps to take to reproduce the issue -->
|
||||
<!-- ✍️edit:-->
|
||||
|
||||
### Expected vs Actual Behavior
|
||||
<!-- If applicable please describe the difference between the expected and actual behavior after following the repro steps. -->
|
||||
<!-- ✍️edit:-->
|
||||
|
||||
|
||||
## 📷 Screenshot
|
||||
<!-- Often a screenshot can help to capture the issue better than a long description. -->
|
||||
<!-- ✍️upload a screenshot:-->
|
||||
|
||||
|
||||
## 🔥 Exception or Error
|
||||
<pre><code>
|
||||
<!-- If the issue is accompanied by an exception or an error, please share it below: -->
|
||||
<!-- ✍️-->
|
||||
|
||||
</code></pre>
|
||||
|
||||
|
||||
## 🌍 Your Environment
|
||||
|
||||
### Browser info
|
||||
<!-- ✍️Is this a browser specific issue? If so, please specify the device, browser, and version. -->
|
||||
|
||||
### Anything else relevant?
|
||||
<!-- ✍️Please provide additional info if necessary. -->
|
@ -1,11 +0,0 @@
|
||||
---
|
||||
name: ⚠️ Security issue disclosure
|
||||
about: Report a security issue in Angular Framework, Material, or CLI
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please read https://angular.io/guide/security#report-issues on how to disclose security related issues.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
16
.github/ISSUE_TEMPLATE/5-support-request.md
vendored
16
.github/ISSUE_TEMPLATE/5-support-request.md
vendored
@ -1,16 +0,0 @@
|
||||
---
|
||||
name: "❓Support request"
|
||||
about: Questions and requests for support
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please do not file questions or support requests on the GitHub issues tracker.
|
||||
|
||||
You can get your questions answered using other communication channels. Please see:
|
||||
https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question
|
||||
|
||||
Thank you!
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
13
.github/ISSUE_TEMPLATE/6-angular-cli.md
vendored
13
.github/ISSUE_TEMPLATE/6-angular-cli.md
vendored
@ -1,13 +0,0 @@
|
||||
---
|
||||
name: "\U0001F6E0️Angular CLI"
|
||||
about: Issues and feature requests for Angular CLI
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please file any Angular CLI issues at: https://github.com/angular/angular-cli/issues/new
|
||||
|
||||
For the time being, we keep Angular CLI issues in a separate repository.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
13
.github/ISSUE_TEMPLATE/7-angular-components.md
vendored
13
.github/ISSUE_TEMPLATE/7-angular-components.md
vendored
@ -1,13 +0,0 @@
|
||||
---
|
||||
name: "\U0001F48EAngular Components"
|
||||
about: Issues and feature requests for Angular Components
|
||||
|
||||
---
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
||||
|
||||
Please file any Angular Components issues at: https://github.com/angular/components/issues/new
|
||||
|
||||
For the time being, we keep Angular Components issues in a separate repository.
|
||||
|
||||
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
|
35
.github/PULL_REQUEST_TEMPLATE.md
vendored
35
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -1,43 +1,24 @@
|
||||
## PR Checklist
|
||||
Please check if your PR fulfills the following requirements:
|
||||
|
||||
- [ ] The commit message follows our guidelines: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit
|
||||
* **Please check if the PR fulfills these requirements**
|
||||
- [ ] The commit message follows our guidelines: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit-message-format
|
||||
- [ ] Tests for the changes have been added (for bug fixes / features)
|
||||
- [ ] Docs have been added / updated (for bug fixes / features)
|
||||
|
||||
|
||||
## PR Type
|
||||
What kind of change does this PR introduce?
|
||||
|
||||
<!-- Please check the one that applies to this PR using "x". -->
|
||||
|
||||
- [ ] Bugfix
|
||||
- [ ] Feature
|
||||
- [ ] Code style update (formatting, local variables)
|
||||
- [ ] Refactoring (no functional changes, no api changes)
|
||||
- [ ] Build related changes
|
||||
- [ ] CI related changes
|
||||
- [ ] Documentation content changes
|
||||
- [ ] angular.io application / infrastructure changes
|
||||
- [ ] Other... Please describe:
|
||||
* **What kind of change does this PR introduce?** (Bug fix, feature, docs update, ...)
|
||||
|
||||
|
||||
## What is the current behavior?
|
||||
<!-- Please describe the current behavior that you are modifying, or link to a relevant issue. -->
|
||||
|
||||
Issue Number: N/A
|
||||
* **What is the current behavior?** (You can also link to an open issue here)
|
||||
|
||||
|
||||
## What is the new behavior?
|
||||
|
||||
* **What is the new behavior (if this is a feature change)?**
|
||||
|
||||
|
||||
## Does this PR introduce a breaking change?
|
||||
|
||||
- [ ] Yes
|
||||
- [ ] No
|
||||
* **Does this PR introduce a breaking change?** (What changes might users need to make in their application due to this PR?)
|
||||
|
||||
|
||||
<!-- If this PR contains a breaking change, please describe the impact and migration path for existing applications below. -->
|
||||
|
||||
* **Other information**:
|
||||
|
||||
## Other information
|
||||
|
183
.github/angular-robot.yml
vendored
183
.github/angular-robot.yml
vendored
@ -1,183 +0,0 @@
|
||||
# Configuration for angular-robot
|
||||
|
||||
#options for the size plugin
|
||||
size:
|
||||
disabled: false
|
||||
maxSizeIncrease: 2000
|
||||
circleCiStatusName: "ci/circleci: test_ivy_aot"
|
||||
|
||||
# options for the merge plugin
|
||||
merge:
|
||||
# the status will be added to your pull requests
|
||||
status:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the name of the status
|
||||
context: "ci/angular: merge status"
|
||||
# text to show when all checks pass
|
||||
successText: "All checks passed!"
|
||||
# text to show when some checks are failing
|
||||
failureText: "The following checks are failing:"
|
||||
|
||||
# the g3 status will be added to your pull requests if they include files that match the patterns
|
||||
g3Status:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the name of the status
|
||||
context: "google3"
|
||||
# text to show when the status is pending, {{PRNumber}} will be replaced by the PR number
|
||||
pendingDesc: "Googler: run g3sync presubmit {{PRNumber}}"
|
||||
# text to show when the status is success
|
||||
successDesc: "Does not affect google3"
|
||||
# link to use for the details
|
||||
url: "http://go/angular/g3sync"
|
||||
# list of patterns to check for the files changed by the PR
|
||||
# this list must be manually kept in sync with google3/third_party/javascript/angular2/copy.bara.sky
|
||||
include:
|
||||
- "LICENSE"
|
||||
- "modules/benchmarks/**"
|
||||
- "modules/system.d.ts"
|
||||
- "packages/**"
|
||||
# list of patterns to ignore for the files changed by the PR
|
||||
exclude:
|
||||
- "packages/*"
|
||||
- "packages/bazel/*"
|
||||
- "packages/bazel/src/api-extractor/**"
|
||||
- "packages/bazel/src/builders/**"
|
||||
- "packages/bazel/src/ng_package/**"
|
||||
- "packages/bazel/src/protractor/**"
|
||||
- "packages/bazel/src/schematics/**"
|
||||
- "packages/compiler-cli/ngcc/**"
|
||||
- "packages/docs/**"
|
||||
- "packages/elements/schematics/**"
|
||||
- "packages/examples/**"
|
||||
- "packages/language-service/**"
|
||||
- "packages/localize/**"
|
||||
- "packages/private/**"
|
||||
- "packages/service-worker/**"
|
||||
- "**/.gitignore"
|
||||
- "**/.gitkeep"
|
||||
- "**/yarn.lock"
|
||||
- "**/package.json"
|
||||
- "**/third_party/**"
|
||||
- "**/tsconfig-build.json"
|
||||
- "**/tsconfig.json"
|
||||
- "**/rollup.config.js"
|
||||
- "**/BUILD.bazel"
|
||||
- "**/*.md"
|
||||
- "packages/**/integrationtest/**"
|
||||
- "packages/**/test/**"
|
||||
- "packages/zone.js/*"
|
||||
- "packages/zone.js/doc/**"
|
||||
- "packages/zone.js/example/**"
|
||||
- "packages/zone.js/scripts/**"
|
||||
|
||||
# comment that will be added to a PR when there is a conflict, leave empty or set to false to disable
|
||||
mergeConflictComment: "Hi @{{PRAuthor}}! This PR has merge conflicts due to recent upstream merges.
|
||||
\nPlease help to unblock it by resolving these conflicts. Thanks!"
|
||||
|
||||
# label to monitor
|
||||
mergeLabel: "PR action: merge"
|
||||
|
||||
# adding any of these labels will also add the merge label
|
||||
mergeLinkedLabels:
|
||||
- "PR action: merge-assistance"
|
||||
|
||||
# list of checks that will determine if the merge label can be added
|
||||
checks:
|
||||
|
||||
# require that the PR has reviews from all requested reviewers
|
||||
#
|
||||
# This enables us to request reviews from both eng and tech writers, or multiple eng folks, and prevents accidental merges.
|
||||
# Rather than merging PRs with pending reviews, if all approvals are obtained and additional reviews are not needed, any pending reviewers should be removed via GitHub UI (this also leaves an audit trail behind these decisions).
|
||||
requireReviews: true,
|
||||
|
||||
# whether the PR shouldn't have a conflict with the base branch
|
||||
noConflict: true
|
||||
# list of labels that a PR needs to have, checked with a regexp (e.g. "PR target:" will work for the label "PR target: master")
|
||||
requiredLabels:
|
||||
- "PR target: *"
|
||||
- "cla: yes"
|
||||
|
||||
# list of labels that a PR shouldn't have, checked after the required labels with a regexp
|
||||
forbiddenLabels:
|
||||
- "PR target: TBD"
|
||||
- "PR action: cleanup"
|
||||
- "PR action: review"
|
||||
- "PR state: blocked"
|
||||
- "cla: no"
|
||||
|
||||
# list of PR statuses that need to be successful
|
||||
requiredStatuses:
|
||||
- "ci/circleci: build"
|
||||
- "ci/circleci: lint"
|
||||
- "ci/circleci: publish_snapshot"
|
||||
- "ci/angular: size"
|
||||
- "cla/google"
|
||||
- "google3"
|
||||
- "pullapprove"
|
||||
|
||||
|
||||
# the comment that will be added when the merge label is added despite failing checks, leave empty or set to false to disable
|
||||
# {{MERGE_LABEL}} will be replaced by the value of the mergeLabel option
|
||||
# {{PLACEHOLDER}} will be replaced by the list of failing checks
|
||||
mergeRemovedComment: "I see that you just added the `{{MERGE_LABEL}}` label, but the following checks are still failing:
|
||||
\n{{PLACEHOLDER}}
|
||||
\n
|
||||
\n**If you want your PR to be merged, it has to pass all the CI checks.**
|
||||
\n
|
||||
\nIf you can't get the PR to a green state due to flakes or broken master, please try rebasing to master and/or restarting the CI job. If that fails and you believe that the issue is not due to your change, please contact the caretaker and ask for help."
|
||||
|
||||
# options for the triage plugin
|
||||
triage:
|
||||
# number of the milestone to apply when the issue has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the issue is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if an issue has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if an issue has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: bug/fix"
|
||||
- "severity*"
|
||||
- "freq*"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: feature"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: refactor"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: RFC / Discussion / question"
|
||||
- "comp: *"
|
||||
|
||||
# options for the triage PR plugin
|
||||
triagePR:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# number of the milestone to apply when the PR has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the PR is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if a PR has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if a PR has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: *"
|
||||
- "effort*"
|
||||
- "risk*"
|
||||
- "comp: *"
|
||||
|
||||
# options for rerunning CI
|
||||
rerunCircleCI:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the label which when added triggers a rerun of the default CircleCI workflow
|
||||
triggerRerunLabel: "PR action: rerun CI at HEAD"
|
15
.github/workflows/lock-closed.yml
vendored
15
.github/workflows/lock-closed.yml
vendored
@ -1,15 +0,0 @@
|
||||
name: Lock closed inactive issues
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run at 16:00 every day
|
||||
- cron: '0 16 * * *'
|
||||
|
||||
jobs:
|
||||
lock_closed:
|
||||
if: github.repository == 'angular/angular'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: angular/dev-infra/github-actions/lock-closed@66462f6
|
||||
with:
|
||||
lock-bot-key: ${{ secrets.LOCK_BOT_PRIVATE_KEY }}
|
52
.gitignore
vendored
52
.gitignore
vendored
@ -1,45 +1,51 @@
|
||||
.DS_STORE
|
||||
|
||||
# Don’t commit the following directories created by pub.
|
||||
packages
|
||||
pubspec.lock
|
||||
.pub
|
||||
.packages
|
||||
|
||||
/dist/
|
||||
/bazel-out
|
||||
/integration/bazel/bazel-*
|
||||
*.log
|
||||
.buildlog
|
||||
node_modules
|
||||
tools/gulp-tasks/cldr/cldr-data/
|
||||
bower_components
|
||||
|
||||
# Or broccoli working directory
|
||||
tmp
|
||||
|
||||
# Or the files created by dart2js.
|
||||
*.dart.js
|
||||
*.dart.precompiled.js
|
||||
*.js_
|
||||
*.js.deps
|
||||
*.js.map
|
||||
|
||||
# Or type definitions we mirror from github
|
||||
# (NB: these lines are removed in publish-build-artifacts.sh)
|
||||
**/typings/**/*.d.ts
|
||||
**/typings/tsd.cached.json
|
||||
|
||||
# Include when developing application packages.
|
||||
pubspec.lock
|
||||
.c9
|
||||
.idea/
|
||||
.devcontainer/*
|
||||
!.devcontainer/README.md
|
||||
!.devcontainer/recommended-devcontainer.json
|
||||
!.devcontainer/recommended-Dockerfile
|
||||
.settings/
|
||||
.vscode/launch.json
|
||||
.vscode/settings.json
|
||||
.vscode/tasks.json
|
||||
*.swo
|
||||
modules/.settings
|
||||
.vscode
|
||||
modules/.vscode
|
||||
.vimrc
|
||||
.nvimrc
|
||||
|
||||
# Don't check in secret files
|
||||
*secret.js
|
||||
|
||||
# Ignore npm/yarn debug log
|
||||
# Ignore npm debug log
|
||||
npm-debug.log
|
||||
yarn-error.log
|
||||
|
||||
/docs/bower_components/
|
||||
|
||||
# build-analytics
|
||||
.build-analytics
|
||||
|
||||
# rollup-test output
|
||||
/modules/rollup-test/dist/
|
||||
|
||||
# User specific bazel settings
|
||||
.bazelrc.user
|
||||
|
||||
.notes.md
|
||||
baseline.json
|
||||
# built dart payload tests
|
||||
/modules_dart/payload/**/build
|
||||
|
1077
.pullapprove.yml
1077
.pullapprove.yml
File diff suppressed because it is too large
Load Diff
137
.travis.yml
Normal file
137
.travis.yml
Normal file
@ -0,0 +1,137 @@
|
||||
language: node_js
|
||||
sudo: false
|
||||
node_js:
|
||||
- '5.4.1'
|
||||
|
||||
branches:
|
||||
except:
|
||||
- g3_v2_0
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.pub-cache
|
||||
- $HOME/.chrome/chromium
|
||||
|
||||
before_cache:
|
||||
# Undo the pollution of the typescript_next build before the cache is primed for future use
|
||||
- if [[ "$MODE" == "typescript_next" ]]; then npm install typescript; fi
|
||||
|
||||
env:
|
||||
global:
|
||||
# Use a newer version of GCC that is required to compile native npm modules for Node v4+ on Ubuntu Precise
|
||||
# more info: https://docs.travis-ci.com/user/languages/javascript-with-nodejs#Node.js-v4-(or-io.js-v3)-compiler-requirements
|
||||
- CXX=g++-4.8
|
||||
- KARMA_DART_BROWSERS=DartiumWithWebPlatform
|
||||
# No sandbox mode is needed for Chromium in Travis, it crashes otherwise: https://sites.google.com/a/chromium.org/chromedriver/help/chrome-doesn-t-start
|
||||
- KARMA_JS_BROWSERS=ChromeNoSandbox
|
||||
- E2E_BROWSERS=ChromeOnTravis
|
||||
- LOGS_DIR=/tmp/angular-build/logs
|
||||
- SAUCE_USERNAME=angular-ci
|
||||
- SAUCE_ACCESS_KEY=9b988f434ff8-fbca-8aa4-4ae3-35442987
|
||||
- BROWSER_STACK_USERNAME=angularteam1
|
||||
- BROWSER_STACK_ACCESS_KEY=BWCd4SynLzdDcv8xtzsB
|
||||
- ARCH=linux-x64
|
||||
- DART_DEV_VERSION=latest
|
||||
- DART_STABLE_VERSION=latest
|
||||
- DART_CHANNEL=stable
|
||||
- DART_VERSION=$DART_STABLE_VERSION
|
||||
# Token for tsd to increase github rate limit
|
||||
# See https://github.com/DefinitelyTyped/tsd#tsdrc
|
||||
# This does not use http://docs.travis-ci.com/user/environment-variables/#Secure-Variables
|
||||
# because those are not visible for pull requests, and those should also be reliable.
|
||||
# This SSO token belongs to github account angular-github-ratelimit-token which has no access
|
||||
# (password is in Valentine)
|
||||
- TSDRC='{"token":"ef474500309daea53d5991b3079159a29520a40b"}'
|
||||
# GITHUB_TOKEN_ANGULAR
|
||||
- secure: "fq/U7VDMWO8O8SnAQkdbkoSe2X92PVqg4d044HmRYVmcf6YbO48+xeGJ8yOk0pCBwl3ISO4Q2ot0x546kxfiYBuHkZetlngZxZCtQiFT9kyId8ZKcYdXaIW9OVdw3Gh3tQyUwDucfkVhqcs52D6NZjyE2aWZ4/d1V4kWRO/LMgo="
|
||||
matrix:
|
||||
# Order: a slower build first, so that we don't occupy an idle travis worker waiting for others to complete.
|
||||
- MODE=dart
|
||||
- MODE=dart DART_CHANNEL=dev
|
||||
- MODE=saucelabs_required
|
||||
- MODE=browserstack_required
|
||||
- MODE=saucelabs_optional
|
||||
- MODE=browserstack_optional
|
||||
- MODE=dart_ddc
|
||||
- MODE=js
|
||||
- MODE=router
|
||||
- MODE=build_only
|
||||
- MODE=typescript_next
|
||||
- MODE=lint
|
||||
- MODE=payload
|
||||
|
||||
matrix:
|
||||
allow_failures:
|
||||
- env: "MODE=saucelabs_optional"
|
||||
- env: "MODE=browserstack_optional"
|
||||
# Tracked in https://github.com/angular/angular/issues/7050
|
||||
- env: "MODE=typescript_next"
|
||||
|
||||
addons:
|
||||
firefox: "38.0"
|
||||
apt:
|
||||
sources:
|
||||
- ubuntu-toolchain-r-test
|
||||
packages:
|
||||
- g++-4.8
|
||||
|
||||
before_install:
|
||||
- node tools/analytics/build-analytics start ci job
|
||||
- node tools/analytics/build-analytics start ci before_install
|
||||
- echo ${TSDRC} > .tsdrc
|
||||
- export CHROME_BIN=$HOME/.chrome/chromium/chrome-linux/chrome
|
||||
- export DISPLAY=:99.0
|
||||
- export GIT_SHA=$(git rev-parse HEAD)
|
||||
- ./scripts/ci/init_android.sh
|
||||
- sh -e /etc/init.d/xvfb start
|
||||
# Use a separate SauceLabs account for upstream/master builds in order for Sauce to create a badge representing the status of just upstream/master
|
||||
- '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && [ "${TRAVIS_BRANCH}" = "master" ] && SAUCE_USERNAME="angular2-ci" && SAUCE_ACCESS_KEY="693ebc16208a-0b5b-1614-8d66-a2662f4e" || true'
|
||||
- node tools/analytics/build-analytics success ci before_install
|
||||
|
||||
install:
|
||||
- node tools/analytics/build-analytics start ci install
|
||||
# Install version of npm that we are locked against
|
||||
- npm install -g npm@3.5.3
|
||||
# Install version of Chromium that we are locked against
|
||||
- ./scripts/ci/install_chromium.sh
|
||||
# Install version of Dart based on the matrix build variables
|
||||
- ./scripts/ci/install_dart.sh ${DART_CHANNEL} ${DART_VERSION} ${ARCH}
|
||||
# Print the size of caches to ease debugging
|
||||
- du -sh ./node_modules || true
|
||||
# Install npm dependencies
|
||||
# check-node-modules will exit(1) if we don't need to install
|
||||
# we need to manually kick off the postinstall script if check-node-modules exit(0)s
|
||||
- node tools/npm/check-node-modules --purge && npm install || npm run postinstall
|
||||
- node tools/analytics/build-analytics success ci install
|
||||
|
||||
before_script:
|
||||
- node tools/analytics/build-analytics start ci before_script
|
||||
- mkdir -p $LOGS_DIR
|
||||
- ./scripts/ci/presubmit-queue-setup.sh
|
||||
- node tools/analytics/build-analytics success ci before_script
|
||||
|
||||
script:
|
||||
- node tools/analytics/build-analytics start ci script
|
||||
- ./scripts/ci/build_and_test.sh ${MODE}
|
||||
- node tools/analytics/build-analytics success ci script
|
||||
|
||||
after_script:
|
||||
- node tools/analytics/build-analytics start ci after_script
|
||||
- ./scripts/ci/print-logs.sh
|
||||
- ./scripts/ci/after-script.sh
|
||||
- ./scripts/publish/publish-build-artifacts.sh
|
||||
- node tools/analytics/build-analytics success ci after_script
|
||||
- tools/analytics/build-analytics $TRAVIS_TEST_RESULT ci job
|
||||
|
||||
notifications:
|
||||
webhooks:
|
||||
urls:
|
||||
- https://webhooks.gitter.im/e/1ef62e23078036f9cee4
|
||||
# trigger Buildtime Trend Service to parse Travis CI log
|
||||
- https://buildtimetrend.herokuapp.com/travis
|
||||
- http://104.197.9.155:8484/hubot/travis/activity
|
||||
on_success: always # options: [always|never|change] default: always
|
||||
on_failure: always # options: [always|never|change] default: always
|
||||
on_start: never # default: never
|
||||
slack:
|
||||
secure: EP4MzZ8JMyNQJ4S3cd5LEPWSMjC7ZRdzt3veelDiOeorJ6GwZfCDHncR+4BahDzQAuqyE/yNpZqaLbwRWloDi15qIUsm09vgl/1IyNky1Sqc6lEknhzIXpWSalo4/T9ZP8w870EoDvM/UO+LCV99R3wS8Nm9o99eLoWVb2HIUu0=
|
25
.vscode/README.md
vendored
25
.vscode/README.md
vendored
@ -1,25 +0,0 @@
|
||||
# VSCode Configuration
|
||||
|
||||
This folder contains opt-in [Workspace Settings](https://code.visualstudio.com/docs/getstarted/settings), [Tasks](https://code.visualstudio.com/docs/editor/tasks), [Launch Configurations](https://code.visualstudio.com/Docs/editor/debugging#_launch-configurations) and [Extension Recommendations](https://code.visualstudio.com/docs/editor/extension-gallery#_workspace-recommended-extensions) that the Angular team recommends using when working on this repository.
|
||||
|
||||
## Usage
|
||||
|
||||
To use the recommended configurations follow the steps below:
|
||||
|
||||
- install the recommended extensions in `.vscode/extensions.json`
|
||||
- copy (or link) `.vscode/recommended-settings.json` to `.vscode/settings.json`
|
||||
- copy (or link) `.vscode/recommended-launch.json` to `.vscode/launch.json`
|
||||
- copy (or link) `.vscode/recommended-tasks.json` to `.vscode/tasks.json`
|
||||
- restart the editor
|
||||
|
||||
If you already have your custom workspace settings you should instead manually merge the file contents.
|
||||
|
||||
This isn't an automatic process so you will need to repeat it when settings are updated.
|
||||
|
||||
To see the recommended extensions select "Extensions: Show Recommended Extensions" in the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).
|
||||
|
||||
## Editing `.vscode/recommended-*.json` files
|
||||
|
||||
If you wish to add extra configuration items please keep in mind any modifications you make here will be used by many users.
|
||||
|
||||
Try to keep these settings/configuations to things that help facilitate the development process and avoid altering the user workflow whenever possible.
|
15
.vscode/extensions.json
vendored
15
.vscode/extensions.json
vendored
@ -1,15 +0,0 @@
|
||||
{
|
||||
// See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
|
||||
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
|
||||
|
||||
// List of extensions which should be recommended for users of this workspace.
|
||||
"recommendations": [
|
||||
"devondcarew.bazel-code",
|
||||
"gkalpak.aio-docs-utils",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
||||
"xaver.clang-format",
|
||||
// The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
|
||||
//"angular.ng-template",
|
||||
//"dbaeumer.vscode-eslint",
|
||||
],
|
||||
}
|
85
.vscode/recommended-launch.json
vendored
85
.vscode/recommended-launch.json
vendored
@ -1,85 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": true,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug (no source maps)",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": false,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/acceptance",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/render3",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
]
|
||||
}
|
31
.vscode/recommended-settings.json
vendored
@ -1,31 +0,0 @@
{
  // Format js and ts files on save with `clang-format.executable`
  // If `clang-format.executable` is not being used, these two settings should be removed otherwise it will break existing formatting.
  // You can instead run `yarn gulp format` to manually format your code.
  "[javascript]": {
    "editor.formatOnSave": true,
  },
  "[typescript]": {
    "editor.formatOnSave": true,
  },
  // Please install https://marketplace.visualstudio.com/items?itemName=xaver.clang-format to take advantage of `clang-format` in VSCode.
  // (See https://clang.llvm.org/docs/ClangFormat.html for more info `clang-format`.)
  "clang-format.executable": "${workspaceRoot}/node_modules/.bin/clang-format",
  // Exclude third party modules and build artifacts from the editor watchers/searches.
  "files.watcherExclude": {
    "**/.git/objects/**": true,
    "**/.git/subtree-cache/**": true,
    "**/node_modules/**": true,
    "**/bazel-out/**": true,
    "**/dist/**": true,
    "**/aio/src/generated/**": true,
  },
  "search.exclude": {
    "**/node_modules": true,
    "**/bower_components": true,
    "**/bazel-out": true,
    "**/dist": true,
    "**/aio/src/generated": true,
  },
  "git.ignoreLimitWarning": true,
}
113
.vscode/recommended-tasks.json
vendored
@ -1,113 +0,0 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "IVY:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/render3",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazel",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
@ -1,13 +0,0 @@
# Yarn Vendoring
We use Yarn's `yarn-path` setting in a shared `.yarnrc` file to ensure that
everyone uses the same version of Yarn. Yarn checks the `.yarnrc` file to
determine whether it should delegate the command to the vendored version at the
provided path.

## How to update
To update our vendored copy to the latest version of Yarn:
- Run this command
  ```sh
  yarn policies set-version latest
  ```
- Remove the previous version
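Putting the steps above together, the whole update might look like this; the old file name is only a placeholder:

```shell
# Download the newest Yarn release into .yarn/releases and point yarn-path at it.
yarn policies set-version latest

# Remove the previously vendored release (placeholder version number).
git rm .yarn/releases/yarn-<old-version>.js
```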
147315
.yarn/releases/yarn-1.21.1.js
vendored
File diff suppressed because one or more lines are too long
5
.yarnrc
@ -1,5 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


yarn-path ".yarn/releases/yarn-1.21.1.js"
50
BUILD.bazel
@ -1,50 +0,0 @@
package(default_visibility = ["//visibility:public"])

exports_files([
    "LICENSE",
    "protractor-perf.conf.js",
    "karma-js.conf.js",
    "browser-providers.conf.js",
    "scripts/ci/track-payload-size.sh",
    "scripts/ci/payload-size.sh",
    "scripts/ci/payload-size.js",
    "package.json",
])

alias(
    name = "tsconfig.json",
    actual = "//packages:tsconfig-build.json",
)

filegroup(
    name = "web_test_bootstrap_scripts",
    # do not sort
    srcs = [
        "@npm//:node_modules/core-js/client/core.js",
        "//packages/zone.js/dist:zone.js",
        "//packages/zone.js/dist:zone-testing.js",
        "//packages/zone.js/dist:task-tracking.js",
        "//:test-events.js",
        "//:shims_for_IE.js",
        # Including systemjs because it defines `__eval`, which produces correct stack traces.
        "@npm//:node_modules/systemjs/dist/system.src.js",
        "@npm//:node_modules/reflect-metadata/Reflect.js",
    ],
)

filegroup(
    name = "angularjs_scripts",
    srcs = [
        # We also declare the unminified AngularJS files since these can be used for
        # local debugging (e.g. see: packages/upgrade/test/common/test_helpers.ts)
        "@npm//:node_modules/angular/angular.js",
        "@npm//:node_modules/angular/angular.min.js",
        "@npm//:node_modules/angular-1.5/angular.js",
        "@npm//:node_modules/angular-1.5/angular.min.js",
        "@npm//:node_modules/angular-1.6/angular.js",
        "@npm//:node_modules/angular-1.6/angular.min.js",
        "@npm//:node_modules/angular-mocks/angular-mocks.js",
        "@npm//:node_modules/angular-mocks-1.5/angular-mocks.js",
        "@npm//:node_modules/angular-mocks-1.6/angular-mocks.js",
    ],
)
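As an aside (not part of the file above), Bazel's query language can show what such a target bundles, which is handy when checking that a `filegroup` picks up the files you expect. A sketch, assuming Bazel is set up at the repository root:

```shell
# List the labels collected by the web_test_bootstrap_scripts filegroup.
bazel query 'labels(srcs, //:web_test_bootstrap_scripts)'

# Print the rule definition as Bazel sees it.
bazel query //:web_test_bootstrap_scripts --output=build
```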
7433
CHANGELOG.md
File diff suppressed because it is too large
@ -1,12 +0,0 @@
# Contributor Code of Conduct
## Version 0.3b-angular

As contributors and maintainers of the Angular project, we pledge to respect everyone who contributes by posting issues, updating documentation, submitting pull requests, providing feedback in comments, and any other activities.

Communication through any of Angular's channels (GitHub, Gitter, IRC, mailing lists, Google+, Twitter, etc.) must be constructive and never resort to personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.

We promise to extend courtesy and respect to everyone involved in this project regardless of gender, gender identity, sexual orientation, disability, age, race, ethnicity, religion, or level of experience. We expect anyone contributing to the Angular project to do the same.

If any member of the community violates this code of conduct, the maintainers of the Angular project may take action, removing issues, comments, and PRs or blocking accounts as deemed appropriate.

If you are subject to or witness unacceptable behavior, or have any other concerns, please email us at [conduct@angular.io](mailto:conduct@angular.io).
34
COMMITTER.md
Normal file
@ -0,0 +1,34 @@
# Pushing changes into the Angular 2 tree

Please see [Using git with Angular repositories](https://docs.google.com/document/d/1h8nijFSaa1jG_UE8v4WP7glh5qOUXnYtAtJh_gwOQHI/edit)
for details about how we maintain a linear commit history, and the rules for committing.

As a contributor, just read the instructions in [CONTRIBUTING.md](CONTRIBUTING.md) and send a pull request.
Someone with committer access will do the rest.

## The `PR: merge` label and `presubmit-*` branches

We have automated the process for merging pull requests into master. Our goal is to minimize the disruption for
Angular committers and also prevent breakages on master.

When a PR has `pr_state: LGTM` and is ready to merge, you should add the `pr_action: merge` label.
Currently (late 2015), we need to ensure that each PR will cleanly merge into the Google-internal version control,
so the caretaker reviews the changes manually.

After this review, the caretaker adds `zomg_admin: do_merge` which is restricted to admins only.
A robot running as [mary-poppins](https://github.com/mary-poppins)
is notified that the label was added by an authorized person,
and will create a new branch in the angular project, using the convention `presubmit-{username}-pr-{number}`.

(Note: if the automation fails, committers can instead push the commits to a branch following this naming scheme.)

When a Travis build succeeds for a presubmit branch named following the convention,
Travis will re-base the commits, merge to master, and close the PR automatically.

Finally, after merge `mary-poppins` removes the presubmit branch.

## Administration

The list of users who can trigger a merge by adding the `zomg_admin: do_merge` label is stored in our appengine app datastore.
Edit the contents of the [CoreTeamMember Table](
https://console.developers.google.com/project/angular2-automation/datastore/query?queryType=KindQuery&namespace=&kind=CoreTeamMember)
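For the fallback case mentioned above, the manual push might look like the following; the username and PR number are made up for illustration, and the remote name depends on your local setup:

```shell
# Hypothetical example: push the commits of PR #1234 by "alice" to a branch
# that follows the presubmit-{username}-pr-{number} convention.
git fetch origin pull/1234/head
git push origin FETCH_HEAD:presubmit-alice-pr-1234
```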
180
CONTRIBUTING.md
@ -1,6 +1,6 @@
|
||||
# Contributing to Angular
|
||||
# Contributing to Angular 2
|
||||
|
||||
We would love for you to contribute to Angular and help make it even better than it is
|
||||
We would love for you to contribute to Angular 2 and help make it even better than it is
|
||||
today! As a contributor, here are the guidelines we would like you to follow:
|
||||
|
||||
- [Code of Conduct](#coc)
|
||||
@ -17,27 +17,19 @@ Help us keep Angular open and inclusive. Please read and follow our [Code of Con
|
||||
|
||||
## <a name="question"></a> Got a Question or Problem?
|
||||
|
||||
Do not open issues for general support questions as we want to keep GitHub issues for bug reports and feature requests. You've got much better chances of getting your question answered on [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) where the questions should be tagged with tag `angular`.
|
||||
If you have questions about how to *use* Angular, please direct them to the [Google Group][angular-group]
|
||||
discussion list or [StackOverflow][stackoverflow]. Please note that Angular 2 is still in early developer preview, and the core team's capacity to answer usage questions is limited. We are also available on [Gitter][gitter].
|
||||
|
||||
Stack Overflow is a much better place to ask questions since:
|
||||
|
||||
- there are thousands of people willing to help on Stack Overflow
|
||||
- questions and answers stay available for public viewing so your question / answer might help someone else
|
||||
- Stack Overflow's voting system assures that the best answers are prominently visible.
|
||||
|
||||
To save your and our time, we will systematically close all issues that are requests for general support and redirect people to Stack Overflow.
|
||||
|
||||
If you would like to chat about the question in real-time, you can reach out via [our gitter channel][gitter].
|
||||
|
||||
## <a name="issue"></a> Found a Bug?
|
||||
If you find a bug in the source code, you can help us by
|
||||
## <a name="issue"></a> Found an Issue?
|
||||
If you find a bug in the source code or a mistake in the documentation, you can help us by
|
||||
[submitting an issue](#submit-issue) to our [GitHub Repository][github]. Even better, you can
|
||||
[submit a Pull Request](#submit-pr) with a fix.
|
||||
|
||||
## <a name="feature"></a> Missing a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our GitHub
|
||||
Repository. If you would like to *implement* a new feature, please submit an issue with
|
||||
a proposal for your work first, to be sure that we can use it.
|
||||
## <a name="feature"></a> Want a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our [GitHub
|
||||
Repository][github]. If you would like to *implement* a new feature, please submit an issue with
|
||||
a proposal for your work first, to be sure that we can use it. Angular 2 is in developer preview
|
||||
and we are not ready to accept major contributions ahead of the full release.
|
||||
Please consider what kind of change it is:
|
||||
|
||||
* For a **Major Feature**, first open an issue and outline your proposal so that it can be
|
||||
@ -48,59 +40,62 @@ and help you to craft the change so that it is successfully accepted into the pr
|
||||
## <a name="submit"></a> Submission Guidelines
|
||||
|
||||
### <a name="submit-issue"></a> Submitting an Issue
|
||||
Before you submit an issue, search the archive, maybe your question was already answered.
|
||||
|
||||
Before you submit an issue, please search the issue tracker, maybe an issue for your problem already exists and the discussion might inform you of workarounds readily available.
|
||||
If your issue appears to be a bug, and hasn't been reported, open a new issue.
|
||||
Help us to maximize the effort we can spend fixing issues and adding new
|
||||
features, by not reporting duplicate issues. Providing the following information will increase the
|
||||
chances of your issue being dealt with quickly:
|
||||
|
||||
We want to fix all the issues as soon as possible, but before fixing a bug we need to reproduce and confirm it. In order to reproduce bugs, we will systematically ask you to provide a minimal reproduction. Having a minimal reproducible scenario gives us a wealth of important information without going back & forth to you with additional questions.
|
||||
* **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
|
||||
* **Angular Version** - what version of Angular is affected (e.g. 2.0.0-alpha.53)
|
||||
* **Motivation for or Use Case** - explain what are you trying to do and why the current behavior is a bug for you
|
||||
* **Browsers and Operating System** - is this a problem with all browsers?
|
||||
* **Reproduce the Error** - provide a live example (using [Plunker][plunker],
|
||||
[JSFiddle][jsfiddle] or [Runnable][runnable]) or an unambiguous set of steps
|
||||
* **Related Issues** - has a similar issue been reported before?
|
||||
* **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
|
||||
causing the problem (line of code or commit)
|
||||
|
||||
A minimal reproduction allows us to quickly confirm a bug (or point out a coding problem) as well as confirm that we are fixing the right problem.
|
||||
|
||||
We will be insisting on a minimal reproduction scenario in order to save maintainers time and ultimately be able to fix more bugs. Interestingly, from our experience, users often find coding problems themselves while preparing a minimal reproduction. We understand that sometimes it might be hard to extract essential bits of code from a larger codebase but we really need to isolate the problem before we can fix it.
|
||||
|
||||
Unfortunately, we are not able to investigate / fix bugs without a minimal reproduction, so if we don't hear back from you, we are going to close an issue that doesn't have enough info to be reproduced.
|
||||
|
||||
You can file new issues by selecting from our [new issue templates](https://github.com/angular/angular/issues/new/choose) and filling out the issue template.
|
||||
You can file new issues by providing the above information [here](https://github.com/angular/angular/issues/new).
|
||||
|
||||
|
||||
### <a name="submit-pr"></a> Submitting a Pull Request (PR)
|
||||
Before you submit your Pull Request (PR) consider the following guidelines:
|
||||
|
||||
1. Search [GitHub](https://github.com/angular/angular/pulls) for an open or closed PR
|
||||
* Search [GitHub](https://github.com/angular/angular/pulls) for an open or closed PR
|
||||
that relates to your submission. You don't want to duplicate effort.
|
||||
1. Be sure that an issue describes the problem you're fixing, or documents the design for the feature you'd like to add.
|
||||
Discussing the design up front helps to ensure that we're ready to accept your work.
|
||||
1. Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without this. Make sure you sign with the primary email address of the Git identity that has been granted access to the Angular repository.
|
||||
1. Fork the angular/angular repo.
|
||||
1. Make your changes in a new git branch:
|
||||
* Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without this.
|
||||
* Make your changes in a new git branch:
|
||||
|
||||
```shell
|
||||
git checkout -b my-fix-branch master
|
||||
```
|
||||
|
||||
1. Create your patch, **including appropriate test cases**.
|
||||
1. Follow our [Coding Rules](#rules).
|
||||
1. Run the full Angular test suite, as described in the [developer documentation][dev-doc],
|
||||
* Create your patch, **including appropriate test cases**.
|
||||
* Follow our [Coding Rules](#rules).
|
||||
* Run the full Angular test suite, as described in the [developer documentation][dev-doc],
|
||||
and ensure that all tests pass.
|
||||
1. Commit your changes using a descriptive commit message that follows our
|
||||
* Commit your changes using a descriptive commit message that follows our
|
||||
[commit message conventions](#commit). Adherence to these conventions
|
||||
is necessary because release notes are automatically generated from these messages.
|
||||
|
||||
```shell
|
||||
git commit -a
|
||||
```
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
|
||||
1. Push your branch to GitHub:
|
||||
* Push your branch to GitHub:
|
||||
|
||||
```shell
|
||||
git push origin my-fix-branch
|
||||
```
|
||||
|
||||
1. In GitHub, send a pull request to `angular:master`.
|
||||
* In GitHub, send a pull request to `angular:master`.
|
||||
* If we suggest changes then:
|
||||
* Make the required updates.
|
||||
* Re-run the Angular test suites to ensure tests are still passing.
|
||||
* Re-run the Angular 2 test suites for JS and Dart to ensure tests are still passing.
|
||||
* Rebase your branch and force push to your GitHub repository (this will update your Pull Request):
|
||||
|
||||
```shell
|
||||
@ -146,7 +141,7 @@ To ensure consistency throughout the source code, keep these rules in mind as yo
|
||||
* All public API methods **must be documented**. (Details TBC).
|
||||
* We follow [Google's JavaScript Style Guide][js-style-guide], but wrap all code at
|
||||
**100 characters**. An automated formatter is available, see
|
||||
[DEVELOPER.md](docs/DEVELOPER.md#clang-format).
|
||||
[DEVELOPER.md](DEVELOPER.md#clang-format).
|
||||
|
||||
## <a name="commit"></a> Commit Message Guidelines
|
||||
|
||||
@ -168,85 +163,36 @@ format that includes a **type**, a **scope** and a **subject**:
|
||||
|
||||
The **header** is mandatory and the **scope** of the header is optional.
|
||||
|
||||
Any line of the commit message cannot be longer than 100 characters! This allows the message to be easier
|
||||
Any line of the commit message cannot be longer 100 characters! This allows the message to be easier
|
||||
to read on GitHub as well as in various git tools.
|
||||
|
||||
The footer should contain a [closing reference to an issue](https://help.github.com/articles/closing-issues-via-commit-messages/) if any.
|
||||
|
||||
Samples: (even more [samples](https://github.com/angular/angular/commits/master))
|
||||
|
||||
```
|
||||
docs(changelog): update changelog to beta.5
|
||||
```
|
||||
```
|
||||
fix(release): need to depend on latest rxjs and zone.js
|
||||
|
||||
The version in our package.json gets copied to the one we publish, and users need the latest of these.
|
||||
```
|
||||
|
||||
### Revert
|
||||
If the commit reverts a previous commit, it should begin with `revert: `, followed by the header of the reverted commit. In the body it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit being reverted.
|
||||
|
||||
### Type
|
||||
Must be one of the following:
|
||||
|
||||
* **build**: Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)
|
||||
* **ci**: Changes to our CI configuration files and scripts (example scopes: Circle, BrowserStack, SauceLabs)
|
||||
* **docs**: Documentation only changes
|
||||
* **feat**: A new feature
|
||||
* **fix**: A bug fix
|
||||
* **perf**: A code change that improves performance
|
||||
* **docs**: Documentation only changes
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing
|
||||
semi-colons, etc)
|
||||
* **refactor**: A code change that neither fixes a bug nor adds a feature
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
* **perf**: A code change that improves performance
|
||||
* **test**: Adding missing tests or correcting existing tests
|
||||
* **build**: Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)
|
||||
* **ci**: Changes to our CI configuration files and scripts (example scopes: Travis, Circle, BrowserStack, SauceLabs)
|
||||
* **chore**: Other changes that don't modify `src` or `test` files
|
||||
|
||||
### Scope
|
||||
The scope should be the name of the npm package affected (as perceived by the person reading the changelog generated from commit messages).
|
||||
|
||||
The following is the list of supported scopes:
|
||||
|
||||
* **animations**
|
||||
* **bazel**
|
||||
* **benchpress**
|
||||
* **common**
|
||||
* **compiler**
|
||||
* **compiler-cli**
|
||||
* **core**
|
||||
* **elements**
|
||||
* **forms**
|
||||
* **http**
|
||||
* **language-service**
|
||||
* **localize**
|
||||
* **platform-browser**
|
||||
* **platform-browser-dynamic**
|
||||
* **platform-server**
|
||||
* **platform-webworker**
|
||||
* **platform-webworker-dynamic**
|
||||
* **router**
|
||||
* **service-worker**
|
||||
* **upgrade**
|
||||
* **zone.js**
|
||||
|
||||
There are currently a few exceptions to the "use package name" rule:
|
||||
|
||||
* **packaging**: used for changes that change the npm package layout in all of our packages, e.g.
|
||||
public path changes, package.json changes done to all packages, d.ts file/format changes, changes
|
||||
to bundles, etc.
|
||||
* **changelog**: used for updating the release notes in CHANGELOG.md
|
||||
* **docs-infra**: used for docs-app (angular.io) related changes within the /aio directory of the
|
||||
repo
|
||||
* **dev-infra**: used for dev-infra related changes within the directories /scripts, /tools and /dev-infra
|
||||
* **ngcc**: used for changes to the [Angular Compatibility Compiler](./packages/compiler-cli/ngcc/README.md)
|
||||
* **ve**: used for changes specific to ViewEngine (legacy compiler/renderer).
|
||||
* none/empty string: useful for `style`, `test` and `refactor` changes that are done across all
|
||||
packages (e.g. `style: add missing semicolons`) and for docs changes that are not related to a
|
||||
specific package (e.g. `docs: fix typo in tutorial`).
|
||||
The scope could be anything specifying place of the commit change. For example
|
||||
`Compiler`, `ElementInjector`, etc.
|
||||
|
||||
### Subject
|
||||
The subject contains a succinct description of the change:
|
||||
The subject contains succinct description of the change:
|
||||
|
||||
* use the imperative, present tense: "change" not "changed" nor "changes"
|
||||
* don't capitalize the first letter
|
||||
* don't capitalize first letter
|
||||
* no dot (.) at the end
|
||||
|
||||
### Body
|
||||
@ -266,33 +212,21 @@ A detailed explanation can be found in this [document][commit-message-format].
|
||||
Please sign our Contributor License Agreement (CLA) before sending pull requests. For any code
|
||||
changes to be accepted, the CLA must be signed. It's a quick process, we promise!
|
||||
|
||||
* For individuals, we have a [simple click-through form][individual-cla].
|
||||
* For corporations, we'll need you to
|
||||
* For individuals we have a [simple click-through form][individual-cla].
|
||||
* For corporations we'll need you to
|
||||
[print, sign and one of scan+email, fax or mail the form][corporate-cla].
|
||||
|
||||
<hr>
|
||||
|
||||
If you have more than one Git identity, you must make sure that you sign the CLA using the primary email address associated with the ID that has been granted access to the Angular repository. Git identities can be associated with more than one email address, and only one is primary. Here are some links to help you sort out multiple Git identities and email addresses:
|
||||
|
||||
* https://help.github.com/articles/setting-your-commit-email-address-in-git/
|
||||
* https://stackoverflow.com/questions/37245303/what-does-usera-committed-with-userb-13-days-ago-on-github-mean
|
||||
* https://help.github.com/articles/about-commit-email-addresses/
|
||||
* https://help.github.com/articles/blocking-command-line-pushes-that-expose-your-personal-email-address/
|
||||
|
||||
Note that if you have more than one Git identity, it is important to verify that you are logged in with the same ID with which you signed the CLA, before you commit changes. If not, your PR will fail the CLA check.
|
||||
|
||||
<hr>
|
||||
|
||||
[angular-group]: https://groups.google.com/forum/#!forum/angular
|
||||
[coc]: https://github.com/angular/code-of-conduct/blob/master/CODE_OF_CONDUCT.md
|
||||
[commit-message-format]: https://docs.google.com/document/d/1QrDFcIiPjSLDn3EL15IJygNPiHORgU1_OOAqWjiDU5Y/edit#
|
||||
[corporate-cla]: http://code.google.com/legal/corporate-cla-v1.0.html
|
||||
[dev-doc]: https://github.com/angular/angular/blob/master/docs/DEVELOPER.md
|
||||
[dev-doc]: https://github.com/angular/angular/blob/master/DEVELOPER.md
|
||||
[github]: https://github.com/angular/angular
|
||||
[gitter]: https://gitter.im/angular/angular
|
||||
[individual-cla]: http://code.google.com/legal/individual-cla-v1.0.html
|
||||
[js-style-guide]: https://google.github.io/styleguide/jsguide.html
|
||||
[jsfiddle]: http://jsfiddle.net
|
||||
[js-style-guide]: http://google-styleguide.googlecode.com/svn/trunk/javascriptguide.xml
|
||||
[jsfiddle]: http://jsfiddle.net/
|
||||
[plunker]: http://plnkr.co/edit
|
||||
[runnable]: http://runnable.com
|
||||
[runnable]: http://runnable.com/
|
||||
[stackoverflow]: http://stackoverflow.com/questions/tagged/angular
|
||||
|
362
DEVELOPER.md
Normal file
@ -0,0 +1,362 @@
|
||||
# Building and Testing Angular 2 for JS and Dart
|
||||
|
||||
This document describes how to set up your development environment to build and test Angular, both
|
||||
JS and Dart versions. It also explains the basic mechanics of using `git`, `node`, and `npm`.
|
||||
|
||||
* [Prerequisite Software](#prerequisite-software)
|
||||
* [Getting the Sources](#getting-the-sources)
|
||||
* [Environment Variable Setup](#environment-variable-setup)
|
||||
* [Installing NPM Modules and Dart Packages](#installing-npm-modules-and-dart-packages)
|
||||
* [Build commands](#build-commands)
|
||||
* [Running Tests Locally](#running-tests-locally)
|
||||
* [Code Style](#code-style)
|
||||
* [Project Information](#project-information)
|
||||
* [CI using Travis](#ci-using-travis)
|
||||
* [Transforming Dart code](#transforming-dart-code)
|
||||
* [Debugging](#debugging)
|
||||
|
||||
See the [contribution guidelines](https://github.com/angular/angular/blob/master/CONTRIBUTING.md)
|
||||
if you'd like to contribute to Angular.
|
||||
|
||||
## Prerequisite Software
|
||||
|
||||
Before you can build and test Angular, you must install and configure the
|
||||
following products on your development machine:
|
||||
|
||||
* [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
|
||||
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
|
||||
Git](https://help.github.com/articles/set-up-git) is a good source of information.
|
||||
|
||||
* [Node.js](http://nodejs.org), (version `>=5.4.1 <6`) which is used to run a development web server,
|
||||
run tests, and generate distributable files. We also use Node's Package Manager, `npm`
|
||||
(version `>=3.5.3 <4.0`), which comes with Node. Depending on your system, you can install Node either from
|
||||
source or as a pre-packaged bundle.
|
||||
|
||||
* *Optional*: [Dart](https://www.dartlang.org) (version ` >=1.13.2 <2.0.0`), specifically the Dart-SDK and
|
||||
Dartium (a version of [Chromium](http://www.chromium.org) with native support for Dart through
|
||||
the Dart VM). One of the **simplest** ways to get both is to install the **Dart Editor bundle**,
|
||||
which includes the editor, SDK and Dartium. See the [Dart tools](https://www.dartlang.org/tools)
|
||||
download [page for instructions](https://www.dartlang.org/tools/download.html).
|
||||
You can also download both **stable** and **dev** channel versions from the [download
|
||||
archive](https://www.dartlang.org/tools/download-archive). In that case, on Windows, Dart must be added
|
||||
to the `Path` (e.g. `path-to-dart-sdk-folder\bin`) and a new `DARTIUM_BIN` environment variable must be
|
||||
created, pointing to the executable (e.g. `path-to-dartium-folder\chrome.exe`).
|
||||
|
||||
|
||||
|
||||
## Getting the Sources
|
||||
|
||||
Fork and clone the Angular repository:
|
||||
|
||||
1. Login to your GitHub account or create one by following the instructions given
|
||||
[here](https://github.com/signup/free).
|
||||
2. [Fork](http://help.github.com/forking) the [main Angular
|
||||
repository](https://github.com/angular/angular).
|
||||
3. Clone your fork of the Angular repository and define an `upstream` remote pointing back to
|
||||
the Angular repository that you forked in the first place.
|
||||
|
||||
```shell
|
||||
# Clone your GitHub repository:
|
||||
git clone git@github.com:<github username>/angular.git
|
||||
|
||||
# Go to the Angular directory:
|
||||
cd angular
|
||||
|
||||
# Add the main Angular repository as an upstream remote to your repository:
|
||||
git remote add upstream https://github.com/angular/angular.git
|
||||
```
|
||||
|
||||
## Environment Variable Setup
|
||||
|
||||
Define the environment variables listed below. These are mainly needed for the testing. The
|
||||
notation shown here is for [`bash`](http://www.gnu.org/software/bash); adapt as appropriate for
|
||||
your favorite shell.
|
||||
|
||||
Examples given below of possible values for initializing the environment variables assume **Mac OS
|
||||
X** and that you have installed the Dart Editor in the directory named by
|
||||
`DART_EDITOR_DIR=/Applications/dart`. This is only for illustrative purposes.
|
||||
|
||||
```shell
|
||||
# DARTIUM_BIN: path to a Dartium browser executable; used by Karma to run Dart tests
|
||||
export DARTIUM_BIN="$DART_EDITOR_DIR/chromium/Chromium.app/Contents/MacOS/Chromium"
|
||||
```
|
||||
|
||||
Add the Dart SDK `bin` directory to your path and/or define `DART_SDK` (this is also detailed
|
||||
[here](https://www.dartlang.org/tools/pub/installing.html)):
|
||||
|
||||
```shell
|
||||
# DART_SDK: path to a Dart SDK directory
|
||||
export DART_SDK="$DART_EDITOR_DIR/dart-sdk"
|
||||
|
||||
# Update PATH to include the Dart SDK bin directory
|
||||
PATH+=":$DART_SDK/bin"
|
||||
```
|
||||
|
||||
Also specify where pub's dependencies are downloaded. By default, this directory is located under `.pub-cache`
|
||||
in your home directory (on Mac and Linux), or in AppData\Roaming\Pub\Cache (on Windows).
|
||||
|
||||
```shell
|
||||
# PUB_CACHE: location of pub dependencies
|
||||
export PUB_CACHE="/Users/<user>/.pub-cache"
|
||||
```
|
||||
|
||||
## Installing NPM Modules and Dart Packages
|
||||
|
||||
Next, install the JavaScript modules and Dart packages needed to build and test Angular:
|
||||
|
||||
```shell
|
||||
# Install Angular project dependencies (package.json)
|
||||
npm install
|
||||
```
|
||||
|
||||
**Optional**: In this document, we make use of project local `npm` package scripts and binaries
|
||||
(stored under `./node_modules/.bin`) by prefixing these command invocations with `$(npm bin)`; in
|
||||
particular `gulp` and `protractor` commands. If you prefer, you can drop this path prefix by either:
|
||||
|
||||
*Option 1*: globally installing these two packages as follows:
|
||||
|
||||
* `npm install -g gulp` (you might need to prefix this command with `sudo`)
|
||||
* `npm install -g protractor` (you might need to prefix this command with `sudo`)
|
||||
|
||||
Since global installs can become stale, and required versions can vary by project, we avoid their
|
||||
use in these instructions.
|
||||
|
||||
*Option 2*: defining a bash alias like `alias nbin='PATH=$(npm bin):$PATH'` as detailed in this
|
||||
[Stackoverflow answer](http://stackoverflow.com/questions/9679932/how-to-use-package-installed-locally-in-node-modules/15157360#15157360) and used like this: e.g., `nbin gulp build`.
|
||||
|
||||
## Build commands
|
||||
|
||||
To build Angular and prepare tests, run:
|
||||
|
||||
```shell
|
||||
$(npm bin)/gulp build
|
||||
```
|
||||
|
||||
Notes:
|
||||
* Results are put in the `dist` folder.
|
||||
* This will also run `pub get` for the subfolders in `modules` and run `dartanalyzer` for
|
||||
every file that matches `<module>/src/<module>.dart`, e.g. `di/src/di.dart`.
|
||||
|
||||
You can selectively build either the JS or Dart versions as follows:
|
||||
|
||||
* `$(npm bin)/gulp build.js`
|
||||
* `$(npm bin)/gulp build.dart`
|
||||
|
||||
To clean out the `dist` folder, run:
|
||||
|
||||
```shell
|
||||
$(npm bin)/gulp clean
|
||||
```
|
||||
|
||||
## Running Tests Locally
|
||||
|
||||
### Full test suite
|
||||
|
||||
* `npm test`: full test suite for both JS and Dart versions of Angular. These are the same tests
|
||||
that run on Travis.
|
||||
|
||||
You can selectively run either the JS or Dart versions as follows:
|
||||
|
||||
* `$(npm bin)/gulp test.all.js`
|
||||
* `$(npm bin)/gulp test.all.dart`
|
||||
|
||||
### Unit tests
|
||||
|
||||
You can run just the unit tests as follows:
|
||||
|
||||
* `$(npm bin)/gulp test.unit.js`: JS tests in a browser; runs in **watch mode** (i.e.
|
||||
watches the test files for changes and re-runs tests when files are updated).
|
||||
* `$(npm bin)/gulp test.unit.cjs`: JS tests in NodeJS; runs in **watch mode**.
|
||||
* `$(npm bin)/gulp test.unit.dart`: Dart tests in Dartium; runs in **watch mode**.
|
||||
|
||||
If you prefer running tests in "single-run" mode rather than watch mode use:
|
||||
|
||||
* `$(npm bin)/gulp test.unit.js/ci`
|
||||
* `$(npm bin)/gulp test.unit.cjs/ci`
|
||||
* `$(npm bin)/gulp test.unit.dart/ci`
|
||||
|
||||
The task updates the dist folder with transpiled code whenever a source or test file changes, and
|
||||
Karma is run against the new output.
|
||||
|
||||
**Note**: If you want to only run a single test you can alter the test you wish to run by changing
|
||||
`it` to `iit` or `describe` to `ddescribe`. This will only run that individual test and make it
|
||||
much easier to debug. `xit` and `xdescribe` can also be useful to exclude a test and a group of
|
||||
tests respectively.
|
||||
|
||||
**Note**: **watch mode** needs symlinks to work, so if you're using windows, ensure you have the
|
||||
rights to build them in your operating system.
|
||||
|
||||
### Unit tests with Sauce Labs or Browser Stack
|
||||
|
||||
First, in a terminal, create a tunnel with [Sauce Connect](https://docs.saucelabs.com/reference/sauce-connect/) or [Browser Stack Local](https://www.browserstack.com/local-testing#command-line), and valid credentials.
|
||||
|
||||
Then, in another terminal:
|
||||
- Define the credentials as environment variables, e.g.:
|
||||
```
|
||||
export SAUCE_USERNAME='my_user'; export SAUCE_ACCESS_KEY='my_key';
|
||||
export BROWSER_STACK_USERNAME='my_user'; export BROWSER_STACK_ACCESS_KEY='my_key';
|
||||
```
|
||||
- Then run `gulp test.unit.js.(sauce|browserstack) --browsers=option1,option2,..,optionN`
|
||||
The options are any mix of browsers and aliases which are defined in the [browser-providers.conf.js](https://github.com/angular/angular/blob/master/browser-providers.conf.js) file.
|
||||
They are case insensitive, and the `SL_` or `BS_` prefix must not be added for browsers.
|
||||
|
||||
Some examples of commands:
|
||||
```
|
||||
gulp test.unit.js.sauce --browsers=Safari8,ie11 //run in Sauce Labs with Safari 8 and IE11
|
||||
gulp test.unit.js.browserstack --browsers=Safari,IE //run in Browser Stack with Safari 7, Safari 8, Safari 9, IE 9, IE 10 and IE 11
|
||||
gulp test.unit.js.sauce --browsers=IOS,safari8,android5.1 //run in Sauce Labs with iOS 7, iOS 8, iOs 9, Safari 8 and Android 5.1
|
||||
```
|
||||
|
||||
### E2E tests
|
||||
|
||||
1. `$(npm bin)/gulp build.js.cjs` (builds benchpress and tests into `dist/js/cjs` folder).
|
||||
2. `$(npm bin)/gulp serve.js.prod serve.dart` (runs a local webserver).
|
||||
3. `$(npm bin)/protractor protractor-js.conf.js`: JS e2e tests.
|
||||
4. `$(npm bin)/protractor protractor-dart2js.conf.js`: dart2js e2e tests.
|
||||
|
||||
Angular specific command line options when running protractor:
|
||||
- `$(npm bin)/protractor protractor-{js|dart2js}-conf.js --ng-help`
|
||||
|
||||
### Performance tests
|
||||
|
||||
1. `$(npm bin)/gulp build.js.cjs` (builds benchpress and tests into `dist/js/cjs` folder)
|
||||
2. `$(npm bin)/gulp serve.js.prod serve.dart` (runs a local webserver)
|
||||
3. `$(npm bin)/protractor protractor-js.conf.js --benchmark`: JS performance tests
|
||||
4. `$(npm bin)/protractor protractor-dart2js.conf.js --benchmark`: dart2js performance tests
|
||||
|
||||
Angular specific command line options when running protractor (e.g. force gc, ...):
|
||||
`$(npm bin)/protractor protractor-{js|dart2js}-conf.js --ng-help`
|
||||
|
||||
## Code Style
|
||||
|
||||
### Formatting with <a name="clang-format">clang-format</a>
|
||||
|
||||
We use [clang-format](http://clang.llvm.org/docs/ClangFormat.html) to automatically enforce code
|
||||
style for our TypeScript code. This allows us to focus our code reviews more on the content, and
|
||||
less on style nit-picking. It also lets us encode our style guide in the `.clang-format` file in the
|
||||
repository, allowing many tools and editors to share our settings.
|
||||
|
||||
To check the formatting of your code, run
|
||||
|
||||
gulp check-format
|
||||
|
||||
Note that the continuous build on Travis runs `gulp enforce-format`. Unlike the `check-format` task,
|
||||
this will actually fail the build if files aren't formatted according to the style guide.
|
||||
|
||||
Your life will be easier if you include the formatter in your standard workflow. Otherwise, you'll
|
||||
likely forget to check the formatting, and waste time waiting for a build on Travis that fails due
|
||||
to some whitespace difference.
|
||||
|
||||
* Use `$(npm bin)/clang-format -i [file name]` to format a file (or multiple).
|
||||
* Use `gulp enforce-format` to check if your code is `clang-format` clean. This also gives
|
||||
you a command line to format your code.
|
||||
* `clang-format` also includes a git hook, run `git clang-format` to format all files you
|
||||
touched.
|
||||
* You can run this as a **git pre-commit hook** to automatically format your delta regions when you
|
||||
commit a change. In the angular repo, run
|
||||
|
||||
```
|
||||
$ echo -e '#!/bin/sh\nexec git clang-format' > .git/hooks/pre-commit
|
||||
$ chmod u+x !$
|
||||
```
|
||||
|
||||
* **WebStorm** can run clang-format on the current file.
|
||||
1. Under Preferences, open Tools > External Tools.
|
||||
1. Plus icon to Create Tool
|
||||
1. Fill in the form:
|
||||
- Name: clang-format
|
||||
- Description: Format
|
||||
- Synchronize files after execution: checked
|
||||
- Open console: not checked
|
||||
- Show in: Editor menu
|
||||
- Program: `$ProjectFileDir$/node_modules/.bin/clang-format`
|
||||
- Parameters: `-i -style=file $FilePath$`
|
||||
- Working directory: `$ProjectFileDir$`
|
||||
* `clang-format` integrations are also available for many popular editors (`vim`, `emacs`,
|
||||
`Sublime Text`, etc.).
|
||||
|
||||
### Linting
|
||||
|
||||
We use [tslint](https://github.com/palantir/tslint) for linting. See linting rules in [gulpfile](gulpfile.js). To lint, run
|
||||
|
||||
```shell
|
||||
$ gulp lint
|
||||
```
|
||||
|
||||
## Generating the API documentation
|
||||
|
||||
The following gulp task will generate the API docs in the `dist/angular.io/partials/api/angular2`:
|
||||
|
||||
```shell
|
||||
$(npm bin)/gulp docs/angular.io
|
||||
```
|
||||
|
||||
You can serve the generated documentation to check how it would render on [angular.io](https://angular.io/):
|
||||
- check out the [angular.io repo](https://github.com/angular/angular.io) locally,
|
||||
- install dependencies as described in the [angular.io README](https://github.com/angular/angular.io/blob/master/README.md),
|
||||
- copy the generated documentation from your local angular repo at `angular/dist/angular.io/partials/api/angular2` to your local angular.io repo at `angular.io/public/docs/js/latest/api`,
|
||||
- run `harp compile` at the root of the angular.io repo to check the generated documentation for errors,
|
||||
- run `harp server` and open a browser at `http://localhost:9000/docs/js/latest/api/` to check the rendered documentation.
|
||||
|
||||
## Project Information
|
||||
|
||||
### Folder structure
|
||||
|
||||
* `modules/*`: modules that will be loaded in the browser
|
||||
* `tools/*`: tools that are needed to build Angular
|
||||
* `dist/*`: build files are placed here.
|
||||
|
||||
### File suffixes
|
||||
|
||||
* `*.ts`: TypeScript files that get transpiled to Dart and EcmaScript 5/6
|
||||
* `*.dart`: Dart files that don't get transpiled
|
||||
|
||||
## CI using Travis
|
||||
|
||||
For instructions on setting up Continuous Integration using Travis, see the instructions given
|
||||
[here](https://github.com/angular/angular.dart/blob/master/travis.md).
|
||||
|
||||
## Transforming Dart code
|
||||
|
||||
See the [wiki](//github.com/angular/angular/wiki/Angular-2-Dart-Transformer).
|
||||
|
||||
## Debugging
|
||||
|
||||
### Debug the transpiler
|
||||
|
||||
If you need to debug the transpiler:
|
||||
|
||||
- add a `debugger;` statement in the transpiler code,
|
||||
- from the root folder, execute `node debug $(npm bin)/gulp build` to enter the node
|
||||
debugger
|
||||
- press "c" to execute the program until you reach the `debugger;` statement,
|
||||
- you can then type "repl" to enter the REPL and inspect variables in the context.
|
||||
|
||||
See the [Node.js manual](http://nodejs.org/api/debugger.html) for more information.
|
||||
|
||||
Notes:
|
||||
- You can also execute `node $(npm bin)/karma start karma-dart.conf.js` depending on which
|
||||
code you want to debug (the former will process the "modules" folder while the latter processes
|
||||
the transpiler specs).
|
||||
- You can also add `debugger;` statements in the specs (JavaScript). The execution will halt when
|
||||
the developer tools are opened in the browser running Karma.
|
||||
|
||||
### Debug the tests
|
||||
|
||||
If you need to debug the tests:
|
||||
|
||||
- add a `debugger;` statement to the test you want to debug (or the source code),
|
||||
- execute karma `$(npm bin)/gulp test.js`,
|
||||
- press the top right "DEBUG" button,
|
||||
- open the DevTools and press F5,
|
||||
- the execution halts at the `debugger;` statement
|
||||
|
||||
**Note (WebStorm users)**:
|
||||
|
||||
1. Create a Karma run config from WebStorm.
|
||||
2. Then in the "Run" menu, press "Debug 'karma-js.conf.js'", and WebStorm will stop in the generated
|
||||
code on the `debugger;` statement.
|
||||
3. You can then step into the code and add watches.
|
||||
|
||||
The `debugger;` statement is needed because WebStorm will stop in a transpiled file. Breakpoints in
|
||||
the original source files are not supported at the moment.
|
2
LICENSE
@ -1,6 +1,6 @@
The MIT License

Copyright (c) 2010-2020 Google LLC. http://angular.io/license
Copyright (c) 2014-2016 Google, Inc. http://angular.io

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -1,13 +1,13 @@
Naming Conventions in Angular
Naming Conventions in Angular2
---

In general Angular should follow TypeScript naming conventions.
In general Angular2 should follow TypeScript naming conventions.
See: https://github.com/Microsoft/TypeScript/wiki/Coding-guidelines


Classes:
- Example: `Compiler`, `ApplicationMetadata`
- Camel case with first letter uppercase
- Camel case with first letter upper-case
- In general prefer single words. (This is so that when appending `Proto` or `Factory` the class
is still reasonable to work with.)
- Should not end with `Impl` or any other word which describes a specific implementation of an
@ -21,10 +21,10 @@ Interfaces:

Methods and functions:
- Example: `bootstrap`, `someMethod`
- Should be camel case with first letter lowercase
- Should be camel case with first lower case


Constants:
Constants
- Example: `CORE_DIRECTIVES`
- Should be all uppercase with SNAKE_CASE
30
README.md
@ -1,26 +1,34 @@
[](https://circleci.com/gh/angular/workflows/angular/tree/master)
[](https://travis-ci.org/angular/angular)
[](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://www.npmjs.com/@angular/core)
[](http://issuestats.com/github/angular/angular)
[](http://issuestats.com/github/angular/angular)
[](http://badge.fury.io/js/angular2)
[](https://npmjs.org/package/angular2)
[](https://saucelabs.com/u/angular2-ci)

Angular
=========

# Angular
Angular is a development platform for building mobile and desktop web applications. This is the
repository for [Angular 2][ng2], both the JavaScript (JS) and [Dart][dart] versions.

Angular is a development platform for building mobile and desktop web applications using TypeScript/JavaScript and other languages.
Angular 2 is currently in **Beta**.

## Quickstart

[Get started in 5 minutes][quickstart].

## Changelog

[Learn about the latest improvements][changelog].

## Want to help?

Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our
guidelines for [contributing][contributing] and then check out one of our issues in the [hotlist: community-help](https://github.com/angular/angular/labels/hotlist%3A%20community-help).

[contributing]: https://github.com/angular/angular/blob/master/CONTRIBUTING.md
[quickstart]: https://angular.io/start
[changelog]: https://github.com/angular/angular/blob/master/CHANGELOG.md
[ng]: https://angular.io

[contributing]: http://github.com/angular/angular/blob/master/CONTRIBUTING.md
[dart]: http://www.dartlang.org
[dartium]: http://www.dartlang.org/tools/dartium
[quickstart]: https://angular.io/docs/ts/latest/quickstart.html
[ng2]: http://angular.io
[ngDart]: http://angulardart.org
[ngJS]: http://angularjs.org
4
TOOLS.md
Normal file
@ -0,0 +1,4 @@
# Developer Tools for Angular 2

- [JavaScript](TOOLS_JS.md)
- [Dart](TOOLS_DART.md)
376
TOOLS_DART.md
Normal file
@ -0,0 +1,376 @@
|
||||
# Developer Tools for Dart
|
||||
|
||||
Use these tools and techniques to increase your app's performance
|
||||
and reliability.
|
||||
|
||||
* [Angular debugging tools](#angular-debugging-tools)
|
||||
* [Code size](#code-size)
|
||||
* [Performance](#performance)
|
||||
|
||||
|
||||
## Angular debugging tools
|
||||
|
||||
Starting with alpha.38, Angular provides a set of debugging tools
|
||||
that are accessible from any browser's developer console.
|
||||
In Chrome, you can get to the dev console by pressing
|
||||
Ctrl + Shift + J (on Mac: Cmd + Opt + J).
|
||||
|
||||
### Enabling the debugging tools
|
||||
|
||||
By default the debugging tools are disabled.
|
||||
Enable the debugging tools as follows:
|
||||
|
||||
```dart
|
||||
import 'package:angular2/platform/browser.dart';
|
||||
|
||||
main() async {
|
||||
var appRef = await bootstrap(Application);
|
||||
enableDebugTools(appRef);
|
||||
}
|
||||
```
|
||||
|
||||
<!-- Change function name to enableDebuggingTools? -->
|
||||
|
||||
|
||||
### Using the debugging tools
|
||||
|
||||
In the browser, open the dev console. The top-level object is called `ng` and
|
||||
contains more specific tools inside it.
|
||||
|
||||
For example, to run the change detection profiler on your app:
|
||||
|
||||
```javascript
|
||||
// In the dev console:
|
||||
ng.profiler.timeChangeDetection();
|
||||
```
|
||||
|
||||
The [Change detection profiler](#change-detection-profiler) section
|
||||
has more details.
|
||||
<!-- Point to API docs when they're published, if they're useful.
|
||||
They should be under
|
||||
http://www.dartdocs.org/documentation/angular2/latest
|
||||
and/or
|
||||
https://angular.io/docs/js/latest/api/. -->
|
||||
|
||||
|
||||
## Code size
|
||||
|
||||
Code must be downloaded, parsed, and executed. Too much code can lead to
|
||||
slow application start-up time, especially on slow networks and low-end devices.
|
||||
The tools and techniques in this section can help you to identify
|
||||
unnecessarily large code and to reduce code size.
|
||||
|
||||
### Finding contributors to code size
|
||||
|
||||
Options for investigating code size include the `--dump-info` dart2js option,
|
||||
ng2soyc, `reflector.trackUsage()`, and code coverage information
|
||||
from the Dart VM.
|
||||
|
||||
#### Use --dump-info
|
||||
|
||||
The `--dump-info` option of `dart2js` outputs information about what happened
|
||||
during compilation. You can specify `--dump-info` in `pubspec.yaml`:
|
||||
|
||||
```yaml
|
||||
transformers:
|
||||
...
|
||||
- $dart2js:
|
||||
commandLineOptions:
|
||||
- --dump-info
|
||||
```
|
||||
|
||||
The [Dump Info Visualizer](https://github.com/dart-lang/dump-info-visualizer)
|
||||
can help you analyze the output.
|
||||
For more information, see the
|
||||
[dart2js_info API reference](http://dart-lang.github.io/dart2js_info/doc/api/).
|
||||
|
||||
#### Use ng2soyc.dart
|
||||
|
||||
[ng2soyc](https://github.com/angular/ng2soyc.dart) is a utility for analyzing
|
||||
code size contributors in Angular 2 applications. It groups code size by
|
||||
library and, assuming your library names follow
|
||||
[standard naming conventions](https://www.dartlang.org/articles/style-guide/#do-prefix-library-names-with-the-package-name-and-a-dot-separated-path)
|
||||
(package.library.sublibrary...), gives the code size breakdown at
|
||||
each level. To reduce noise in the output of very large apps, ng2soyc provides
|
||||
an option to hide libraries that are too small, so you can focus on the biggest
|
||||
contributors.
|
||||
|
||||
#### Find unused reflection data
|
||||
|
||||
Your app might have types that are annotated with `@Component` or `@Injectable`
|
||||
but never used.
|
||||
To find these unused types, use `reflector.trackUsage()` and then,
|
||||
after exercising your app, `reflector.listUnusedKeys()`.
|
||||
For example:
|
||||
|
||||
```
|
||||
import 'package:angular2/src/core/reflection/reflection.dart';
|
||||
...
|
||||
main() async {
|
||||
reflector.trackUsage();
|
||||
await bootstrap(AppComponent);
|
||||
print('Unused keys: ${reflector.listUnusedKeys()}');
|
||||
}
|
||||
```
|
||||
|
||||
When you run that code (in Dartium or another browser),
|
||||
you'll see a list of types that Angular _can_ inject but hasn't needed to.
|
||||
Consider removing those types or their `@Component`/`@Injectable` annotation
|
||||
to decrease your app's code size.
|
||||
|
||||
Three conditions must be true for `listUnusedKeys()` to return helpful data:
|
||||
|
||||
1. The angular2 transformer must run on the app.
|
||||
2. If you're running a JavaScript version of the app,
|
||||
the app must not be minified, so that the names are readable.
|
||||
3. You must exercise your app in as many ways as possible
|
||||
before calling `listUnusedKeys()`.
|
||||
Otherwise, you might get false positives:
|
||||
keys that haven't been used only because you didn't exercise
|
||||
the relevant feature of the app.
|
||||
|
||||
To run the angular2 transformer, first specify it in `pubspec.yaml`:
|
||||
|
||||
```
|
||||
name: hello_world
|
||||
...
|
||||
transformers:
|
||||
- angular2:
|
||||
entry_points: web/main.dart
|
||||
```
|
||||
|
||||
Then use pub to run the transformer. If you use `pub serve`,
|
||||
it provides both Dart and unminified (by default) JavaScript versions.
|
||||
If you want to serve actual files, then use `pub build` in debug mode
|
||||
to generate Dart and unminified JavaScript files:
|
||||
`pub build --mode=debug`.
|
||||
|
||||
The `reflector.trackUsage()` method makes Angular track the reflection
|
||||
information used by the app. Reflection information (`ReflectionInfo`) is a data
|
||||
structure that stores information that Angular uses for locating DI factories
|
||||
and for generating change detectors and other code related to a
|
||||
given type.
|
||||
|
||||
#### Use code coverage to find dead code
|
||||
|
||||
When running in Dartium (or in the Dart VM, in general) you can request code
|
||||
coverage information from the VM. You can either use
|
||||
[observatory](https://www.dartlang.org/tools/observatory/) or download
|
||||
the coverage file and use your own tools to inspect it. Lines of code that are
|
||||
not covered are top candidates for dead code.
|
||||
|
||||
Keep in mind, however, that uncovered code is not sufficient evidence of dead
|
||||
code, only necessary evidence. It is perfectly possible that you simply didn't
|
||||
exercise your application in a way that triggers the execution of uncovered
|
||||
code. A common example is error handling code. Just because your testing never
|
||||
encountered an error does not mean the error won't happen in production. You
|
||||
therefore don't have to rush and remove all the `catch` blocks.
|
||||
|
||||
### Reducing code size
|
||||
|
||||
To reduce code size, you can disable reflection,
|
||||
enable minification, and manually remove dead code.
|
||||
You can also try less safe options such as
|
||||
telling dart2js to trust type annotations.
|
||||
|
||||
|
||||
#### Disable reflection
|
||||
|
||||
`dart:mirrors` allows discovering program metadata at runtime. However, this
means that `dart2js` needs to retain that metadata, which increases the size of
the resulting JS output. In practice, it is possible to extract most of the
metadata necessary for your metaprogramming tasks statically, using a
transformer and `package:analyzer`, and act on it before compiling to JS.
|
||||
|
||||
#### Enable minification
|
||||
|
||||
Minification shortens all your `longMethodNames` into 2- or 3-letter
symbols. `dart2js` ensures that this kind of renaming is done safely, without
breaking the functionality of your programs. You can enable it in `pubspec.yaml`
under the `$dart2js` transformer:
|
||||
|
||||
```yaml
|
||||
transformers:
|
||||
...
|
||||
- $dart2js:
|
||||
minify: true
|
||||
```
|
||||
|
||||
#### Manually remove dead code
|
||||
|
||||
`dart2js` comes with dead code elimination out-of-the-box. However, it may not
|
||||
always be able to tell if a piece of code could be used. Consider the following
|
||||
example:
|
||||
|
||||
```dart
|
||||
/// This function decides which serialization format to use
|
||||
void setupSerializers() {
|
||||
if (server.doYouSupportProtocolBuffers()) {
|
||||
useProtobufSerializers();
|
||||
} else {
|
||||
useJsonSerializers();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
In this example the application asks the server what kind of serialization
|
||||
format it uses and dynamically chooses one or the other. `dart2js` can't
|
||||
tell whether the server responds with yes or no, so it must retain both
|
||||
kinds of serializers. However, if you know that your server supports
|
||||
protocol buffers, you can remove that `if` block entirely and default to
|
||||
protocol buffers.
|
||||
|
||||
Code coverage (see above) is a good way to find dead code in your app.
|
||||
|
||||
#### Unsafe options
|
||||
|
||||
Dart also provides more aggressive optimization options. However, you have to
be careful when using them, and as of today the benefits aren't that clear. If
your type annotations are inaccurate, you may end up with non-Darty runtime
behavior, including the classic "undefined is not a function" error, as
well as the "keep on truckin'" behavior, e.g. `null + 1 == 1` and
`{} + [] == 0`.
|
||||
|
||||
`--trust-type-annotations` tells `dart2js` to trust that your type annotations
|
||||
are correct. So if you have a function `foo(Bar bar)` the compiler can omit the
|
||||
check that `bar` is truly `Bar` when calling methods on it.
|
||||
|
||||
`--trust-primitives` tells `dart2js` that primitive types, such as numbers and
booleans, are never `null` when performing arithmetic, and that your program
does not run into range errors when operating on lists, letting the compiler
remove some of the error-checking code.
|
||||
|
||||
Specify these options in `pubspec.yaml`.
|
||||
|
||||
Example:
|
||||
|
||||
```yaml
|
||||
transformers:
|
||||
...
|
||||
- $dart2js:
|
||||
commandLineOptions:
|
||||
- --trust-type-annotations
|
||||
- --trust-primitives
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
### Change detection profiler
|
||||
|
||||
If your application is janky (it misses frames) or is slow according to other
|
||||
metrics, you need to find out why. This tool helps by measuring the average
|
||||
speed of _change detection_, a phase in Angular's
|
||||
lifecycle that detects changes in values that are bound to the UI.
|
||||
Janky UI updates can result from slowness either in _computing_ the changes or
|
||||
in _applying_ those changes to the UI.
|
||||
|
||||
For your app to be performant, the process of _computing_ changes must be very
|
||||
fast—preferably **under 3 milliseconds**.
|
||||
Fast change computation leaves room for
|
||||
the application logic, UI updates, and browser rendering pipeline
|
||||
to fit within a 16 ms frame (assuming a target frame rate of 60 FPS).
|
||||
|
||||
The change detection profiler repeatedly performs change detection
|
||||
without invoking any user actions, such as clicking buttons or entering
|
||||
text in input fields. It then computes the average amount of time
|
||||
(in milliseconds) to perform a single cycle of change detection and
|
||||
prints that to the console. This number depends on the current state of the UI. You are likely to see different numbers
|
||||
as you go from one screen in your application to another.
|
||||
|
||||
#### Running the profiler
|
||||
|
||||
Before running the profiler, enable the debugging tools
|
||||
and put the app into the state you want to measure:
|
||||
|
||||
1. If you haven't already done so,
|
||||
[enable the debugging tools](#enabling-the-debugging-tools).
|
||||
2. Navigate the app to a screen whose performance you want to profile.
|
||||
3. Make sure the screen is in a state that you want to measure.
|
||||
For example, you might want to profile the screen several times,
|
||||
with different amounts and kinds of data.
|
||||
|
||||
To run the profiler, enter the following in the dev console:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection();
|
||||
```
|
||||
|
||||
The results are visible in the console.
|
||||
|
||||
|
||||
#### Recording CPU profiles
|
||||
|
||||
To record a profile, pass `{record: true}` to `timeChangeDetection()`:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection({record: true});
|
||||
```
|
||||
|
||||
Then open the **Profiles** tab. The recorded profile has the title
|
||||
**Change Detection**. In Chrome, if you record the profile repeatedly, all the
|
||||
profiles are nested under Change Detection.
|
||||
|
||||
|
||||
#### Interpreting the numbers
|
||||
|
||||
In a properly designed application, repeated attempts to detect changes without
|
||||
any user actions result in no changes to the UI. It is
|
||||
also desirable to have the cost of a user action be proportional to the amount
|
||||
of UI changes required. For example, popping up a menu with 5 items should be
|
||||
vastly faster than rendering a table of 500 rows and 10 columns. Therefore,
|
||||
change detection with no UI updates should be as fast as possible.
|
||||
|
||||
#### Investigating slow change detection
|
||||
|
||||
So you found a screen in your application on which the profiler reports a very
|
||||
high number (i.e. >3ms). This is where a recorded CPU profile can help. Enable
|
||||
recording while profiling:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection({record: true});
|
||||
```
|
||||
|
||||
Then look for hot spots using
|
||||
[Chrome CPU profiler](https://developer.chrome.com/devtools/docs/cpu-profiling).
|
||||
|
||||
#### Reducing change detection cost
|
||||
|
||||
There are many reasons for slow change detection. To gain intuition about
|
||||
possible causes it helps to understand how change detection works. Such a
|
||||
discussion is outside the scope of this document,
|
||||
but here are some key concepts.
|
||||
|
||||
<!-- TODO: link to change detection docs -->
|
||||
|
||||
By default, Angular uses a _dirty checking_ mechanism to find model changes.
|
||||
This mechanism involves evaluating every bound expression that's active on the
|
||||
UI. These usually include text interpolation via `{{expression}}` and property
|
||||
bindings via `[prop]="expression"`. If any of the evaluated expressions are
|
||||
costly to compute, they might contribute to slow change detection. A good way to
|
||||
speed things up is to use plain class fields in your expressions and avoid any
|
||||
kind of computation. For example:
|
||||
|
||||
```dart
|
||||
@View(
|
||||
template: '<button [enabled]="isEnabled">{{title}}</button>'
|
||||
)
|
||||
class FancyButton {
|
||||
// GOOD: no computation, just returns the value
|
||||
bool isEnabled;
|
||||
|
||||
// BAD: computes the final value upon request
|
||||
String _title;
|
||||
String get title => _title.trim().toUpperCase();
|
||||
}
|
||||
```
|
||||
|
||||
Most cases like these can be solved by precomputing the value and storing the
|
||||
final value in a field.
|
||||
|
||||
Angular also supports a second type of change detection: the _push_ model. In
|
||||
this model, Angular does not poll your component for changes. Instead, the
|
||||
component tells Angular when it changes, and only then does Angular perform
|
||||
the update. This model is suitable in situations when your data model uses
|
||||
observable or immutable objects.
|
||||
|
||||
<!-- TODO: link to discussion of push model -->
|
140
TOOLS_JS.md
Normal file
@ -0,0 +1,140 @@
|
||||
# Developer Tools for JavaScript
|
||||
|
||||
Here you will find a collection of tools and tips for keeping your application
performing well and containing fewer bugs.
|
||||
|
||||
## Angular debug tools in the dev console
|
||||
|
||||
Angular provides a set of debug tools that are accessible from any browser's
|
||||
developer console. In Chrome the dev console can be accessed by pressing
|
||||
Ctrl + Shift + J.
|
||||
|
||||
### Enabling debug tools
|
||||
|
||||
By default the debug tools are disabled. You can enable debug tools as follows:
|
||||
|
||||
```typescript
|
||||
import {bootstrap, enableDebugTools} from 'angular2/platform/browser';
|
||||
|
||||
bootstrap(Application).then((appRef) => {
|
||||
enableDebugTools(appRef);
|
||||
});
|
||||
```
|
||||
|
||||
### Using debug tools
|
||||
|
||||
In the browser, open the developer console (Ctrl + Shift + J in Chrome). The
top-level object is called `ng` and contains more specific tools inside it.
|
||||
|
||||
Example:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection();
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
### Change detection profiler
|
||||
|
||||
If your application is janky (it misses frames) or is slow according to other
metrics, it is important to find the root cause of the issue. Change detection
is a phase in Angular's lifecycle that detects changes in values that are
bound to the UI, and if it finds a change it performs the corresponding UI update.
However, sometimes it is hard to tell whether the slowness is due to the act of
computing the changes or due to the act of applying those changes
to the UI. For your application to be performant it is important that the
process of computing changes is very fast. For best results it should be under
3 milliseconds in order to leave room for the application logic, the UI updates
and the browser's rendering pipeline to fit within the 16 millisecond frame
(assuming the 60 FPS target frame rate).
|
||||
|
||||
The change detection profiler repeatedly performs change detection without invoking
|
||||
any user actions, such as clicking buttons or entering text in input fields. It
|
||||
then computes the average amount of time it took to perform a single cycle of
|
||||
change detection in milliseconds and prints it to the console. This number
|
||||
depends on the current state of the UI. You will likely see different numbers
|
||||
as you go from one screen in your application to another.
|
||||
|
||||
#### Running the profiler
|
||||
|
||||
Enable debug tools (see above), then in the dev console enter the following:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection();
|
||||
```
|
||||
|
||||
The results will be printed to the console.
|
||||
|
||||
#### Recording CPU profile
|
||||
|
||||
Pass `{record: true}` as an argument:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection({record: true});
|
||||
```
|
||||
|
||||
Then open the "Profiles" tab. You will see the recorded profile titled
|
||||
"Change Detection". In Chrome, if you record the profile repeatedly, all the
|
||||
profiles will be nested under "Change Detection".
|
||||
|
||||
#### Interpreting the numbers
|
||||
|
||||
In a properly designed application, repeated attempts to detect changes without
any user actions should result in no changes being applied to the UI. It is
|
||||
also desirable to have the cost of a user action be proportional to the amount
|
||||
of UI changes required. For example, popping up a menu with 5 items should be
|
||||
vastly faster than rendering a table of 500 rows and 10 columns. Therefore,
|
||||
change detection with no UI updates should be as fast as possible. Ideally the
|
||||
number printed by the profiler should be well below the length of a single
|
||||
animation frame (16ms). A good rule of thumb is to keep it under 3ms.
|
||||
|
||||
#### Investigating slow change detection
|
||||
|
||||
So you found a screen in your application on which the profiler reports a very
|
||||
high number (i.e. >3ms). This is where a recorded CPU profile can help. Enable
|
||||
recording while profiling:
|
||||
|
||||
```javascript
|
||||
ng.profiler.timeChangeDetection({record: true});
|
||||
```
|
||||
|
||||
Then look for hot spots using
|
||||
[Chrome CPU profiler](https://developer.chrome.com/devtools/docs/cpu-profiling).
|
||||
|
||||
#### Reducing change detection cost
|
||||
|
||||
There are many reasons for slow change detection. To gain intuition about
|
||||
possible causes it would help to understand how change detection works. Such a
|
||||
discussion is outside the scope of this document (TODO link to docs), but here
|
||||
are some key concepts in brief.
|
||||
|
||||
By default Angular uses "dirty checking" mechanism for finding model changes.
|
||||
This mechanism involves evaluating every bound expression that's active on the
|
||||
UI. These usually include text interpolation via `{{expression}}` and property
|
||||
bindings via `[prop]="expression"`. If any of the evaluated expressions are
|
||||
costly to compute, they could contribute to slow change detection. A good way to
speed things up is to use plain class fields in your expressions and avoid any
kind of computation. For example:
|
||||
|
||||
```typescript
|
||||
@Component({
|
||||
template: '<button [enabled]="isEnabled">{{title}}</button>'
|
||||
})
|
||||
class FancyButton {
|
||||
  // GOOD: no computation, just returns the value
  isEnabled: boolean;

  // BAD: computes the final value upon request
  _title: string;
  get title(): string { return this._title.trim().toUpperCase(); }
|
||||
}
|
||||
```
|
||||
|
||||
Most cases like these could be solved by precomputing the value and storing the
|
||||
final value in a field.
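
As a rough sketch (not part of the original example), the same component could
precompute the title whenever it is set. The `setTitle` helper below is
hypothetical, and the `angular2/core` import path is an assumption about this
release's package layout:

```typescript
import {Component} from 'angular2/core';

@Component({
  template: '<button [enabled]="isEnabled">{{title}}</button>'
})
class FancyButton {
  isEnabled: boolean;

  // GOOD: the expensive work happens once, when the title is set,
  // not on every change detection cycle.
  title: string = '';

  setTitle(rawTitle: string) {
    this.title = rawTitle.trim().toUpperCase();
  }
}
```

With this shape, the `{{title}}` binding only reads a plain field during change
detection.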
|
||||
|
||||
Angular also supports a second type of change detection - the "push" model. In
|
||||
this model Angular does not poll your component for changes. Instead, the
|
||||
component "tells" Angular when it changes and only then does Angular perform
|
||||
the update. This model is suitable in situations when your data model uses
|
||||
observable or immutable objects (also a discussion for another time).
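
As a minimal sketch of opting a component into the push model (the `ScoreCard`
component and its `score` input are hypothetical, the `ChangeDetectionStrategy`
enum and `Input` decorator are assumed to be exported from `angular2/core` in
this release, and the exact enum member name may differ between pre-release
versions):

```typescript
import {Component, Input, ChangeDetectionStrategy} from 'angular2/core';

@Component({
  selector: 'score-card',
  template: '<span>{{score}}</span>',
  // With the push strategy, Angular re-checks this component only when it
  // receives new input values (e.g. fresh immutable objects), rather than
  // polling it on every change detection cycle.
  changeDetection: ChangeDetectionStrategy.OnPush
})
class ScoreCard {
  @Input() score: number;
}
```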
|
168
TRIAGE_AND_LABELS.md
Normal file
@ -0,0 +1,168 @@
|
||||
# Triage Process and Github Labels for Angular 2
|
||||
|
||||
This document describes how the Angular team uses labels and milestones to triage issues on github.
|
||||
|
||||
# Issues and PRs
|
||||
## Triaged vs Untriaged Issues
|
||||
|
||||
Every triaged issue must have four attributes assigned to it:
|
||||
|
||||
* `priority` -- P0 through P4. P0 issues are "drop everything and do this now". P4 are nice to have.
|
||||
* `component` -- Which area of Angular knowledge this relates to.
|
||||
* `effort` -- Rough assessment of how much work this issue is. E.g. `effort: easy` means
|
||||
"probably a few hours of work".
|
||||
* `type` -- Whether this issue is a bug, feature, or other kind of task.
|
||||
|
||||
Untriaged issues are any issues in the queue that don't yet have these four attributes.
|
||||
|
||||
You can view a report of untriaged issues here, in our
|
||||
[Angular Triage Dashboard](http://mhevery.github.io/github_issues/).
|
||||
|
||||
Issues should also have a clear action to complete that can be addressed or resolved within the
|
||||
scope of Angular 2. We'll close issues that don't meet these criteria.
|
||||
|
||||
### Assigning Issues to Milestones
|
||||
|
||||
Any issue that is being worked on must have:
|
||||
|
||||
* An `assignee`: The person doing the work.
|
||||
* A `Milestone`: When we expect to complete this work.
|
||||
|
||||
We aim to only have at most three milestones open at a time:
|
||||
|
||||
* Closing Milestone: A milestone with a very small number of issues, about to release.
|
||||
* Current Milestone: Work that we plan to complete within one week.
|
||||
* Next Milestone: Work that is > 1 week but current for the team.
|
||||
|
||||
The [backlog](https://github.com/angular/angular/issues?q=is%3Aopen+is%3Aissue+no%3Amilestone)
|
||||
consists of all issues that have been triaged but do not have an assignee or milestone.
|
||||
|
||||
## Triaged vs Untriaged PRs
|
||||
|
||||
Because of the cumulative pain associated with rebasing PRs, we triage PRs daily, and
|
||||
closing or reviewing PRs is a top priority ahead of other ongoing work.
|
||||
|
||||
Every triaged PR must have a `pr_action` label assigned to it and an assignee:
|
||||
|
||||
* `pr_action: review` -- work is complete and comment is needed from the assignee.
|
||||
* `pr_action: cleanup` -- more work is needed from the current assignee.
|
||||
* `pr_action: discuss` -- discussion is needed, to be led by the current assignee.
|
||||
* `pr_action: merge` -- the PR should be merged. Add this to a PR when you would like to
|
||||
trigger automatic merging following a successful build. This is described in [COMMITTER.md](COMMITTER.md).
|
||||
|
||||
In addition, PRs can have the following states:
|
||||
|
||||
* `pr_state: LGTM` -- PR may have outstanding changes but does not require further review.
|
||||
* `pr_state: WIP` -- PR is experimental or rapidly changing. Not ready for review or triage.
|
||||
* `pr_state: blocked` -- PR is blocked on an issue or other PR. Not ready for review or triage.
|
||||
|
||||
Note that an LGTM state does not mean a PR is ready to merge: for example, a reviewer might set the
|
||||
LGTM state but request a minor tweak that doesn't need further review, e.g., a rebase or small
|
||||
uncontroversial change.
|
||||
|
||||
PRs do not need to be assigned to milestones, unless a milestone release should be held for that
|
||||
PR to land.
|
||||
|
||||
Victor (`vsavkin`) and Tobias (`tbosch`) are owners of the PR queue. Here is a list of [current
|
||||
untriaged PRs](https://github.com/angular/angular/pulls?utf8=%E2%9C%93&q=is%3Aopen+no%3Amilestone+is%3Apr+-label%3A%22pr_action%3A+cleanup%22+-label%3A%22pr_action%3A+merge%22+-label%3A%22pr_action%3A+review%22+-label%3A%22pr_action%3A+discuss%22+-label%3A%22pr_state%3A+blocked%22+-label%3A%22pr_state%3A+WIP%22+).
|
||||
|
||||
# Prioritization of Work
|
||||
|
||||
What should you be working on?
|
||||
|
||||
1. Any PRs that are assigned to you that don't have `pr_state: WIP` or `pr_state: blocked`
|
||||
1. Any issues that are assigned to you in the lowest-numbered Milestone
|
||||
1. Any issues that are assigned to you in any Milestone
|
||||
|
||||
If there are no issues assigned to you in any Milestone, pick an issue, self-assign it, and add
|
||||
it to the most appropriate Milestone based on effort.
|
||||
|
||||
Here are some suggestions for what to work on next:
|
||||
|
||||
* Filter for issues in a component that you are knowledgeable about, and pick something that has a
|
||||
high priority.
|
||||
* Filter for any small effort task that has the special `cust: GT` or `cust: Ionic` tags,
|
||||
and priority > P3.
|
||||
* Add a new task that's really important; add the `component`, `priority`, `effort`, and `type` labels, then
assign it to yourself and the most appropriate milestone.
|
||||
|
||||
# Labels Used in Triage
|
||||
|
||||
## Priority
|
||||
How urgent is this issue? We use priority to determine what should be worked on in each new
|
||||
milestone.
|
||||
|
||||
* `P0: critical` -- drop everything to work on this
|
||||
* `P1: urgent` -- resolve quickly in the current milestone. People are blocked.
|
||||
* `P2: required` -- needed for development but not urgent yet. workaround exists, or e.g. new API
|
||||
* `P3: important` -- must complete before Angular 2 is ready for release
|
||||
* `P4: nice to have` -- a good idea, but maybe not until after release
|
||||
|
||||
|
||||
## Effort
|
||||
Rough, non-binding estimate of how much work this issue represents. Please change this assessment
|
||||
for anything you're working on to better reflect reality.
|
||||
|
||||
* `effort: easy` -- straightforward issue that can be resolved in a few hours, e.g. < 1 day of work.
|
||||
* `effort: medium` -- issue that will be a few days of work. Can be completed within a single
|
||||
milestone.
|
||||
* `effort: tough` -- issue that will likely take more than 1 milestone to complete.
|
||||
|
||||
<!-- We don't like these label names as
|
||||
they're not absolute (what is one developer-hour, really?) but decided it wasn't worth arguing
|
||||
over terms. -->
|
||||
|
||||
## Component
|
||||
Which area of Angular knowledge is this issue most closely related to? Helpful when deciding what
|
||||
to work on next.
|
||||
|
||||
* `comp: benchpress` -- benchmarks and performance testing → *tbosch*, *crossj*
|
||||
* `comp: build/dev-productivity` -- build process, e.g. CLI and related tasks → *iminar*, *caitp*
|
||||
* `comp: build/pipeline` -- build pipeline, e.g. ts2dart → *mprobst*, *alexeagle*
|
||||
* `comp: core` -- general core Angular issues, not related to a sub-category (see below) →
|
||||
*mhevery*
|
||||
* `comp: core/animations` -- animations framework → *matsko*
|
||||
* `comp: core/change_detection` -- change detection → *vsavkin*
|
||||
* `comp: core/di` -- dependency injection → *vicb*, *rkirov*
|
||||
* `comp: core/directives` -- directives
|
||||
* `comp: core/forms` -- forms → *vsavkin*
|
||||
* `comp: core/pipes` -- pipes
|
||||
* `comp: core/view` -- runtime processing of the `View`s
|
||||
* `comp: core/view/compiler` -- static analysis of the templates which generate `ProtoView`s.
|
||||
* `comp: core/testbed` -- e2e tests and support for them
|
||||
* `comp: core/webworker` -- core web worker infrastructure
|
||||
* `comp: dart-transformer` -- Dart transforms → *kegluneq*, *jakemac*
|
||||
* `comp: data-access` -- → *jeffbcross*
|
||||
* `comp: docs` -- API docs and doc generation → *naomiblack*, *petebacondarwin*
|
||||
* `comp: material-components` -- Angular Material components built in Angular 2 → *jelbourn*
|
||||
* `comp: router` -- Component Router → *btford*, *igorminar*, *matsko*
|
||||
* `comp: wrenchjs`
|
||||
|
||||
## Type
|
||||
What kind of problem is this?
|
||||
|
||||
* `type RFC / discussion / question`
|
||||
* `type bug`
|
||||
* `type chore`
|
||||
* `type feature`
|
||||
* `type performance`
|
||||
* `type refactor`
|
||||
|
||||
## Special Labels
|
||||
|
||||
### action:design
|
||||
More active discussion is needed before the issue can be worked on further. Typically used for
|
||||
`type: feature` or `type: RFC/discussion/question`
|
||||
|
||||
[See all issues that need discussion](https://github.com/angular/angular/labels/action:%20Design)
|
||||
|
||||
### cla
|
||||
Managed by googlebot. Indicates whether a PR has a CLA on file for its author(s). Only issues with
|
||||
`cla:yes` should be merged into master.
|
||||
|
||||
### cust
|
||||
This is an issue causing user pain for early adopter customers `cust: GT` or `cust: Ionic`.
|
||||
|
||||
### WORKS_AS_INTENDED
|
||||
|
||||
Only used on closed issues, to indicate to the reporter why we closed it.
|
120
WORKSPACE
@ -1,120 +0,0 @@
|
||||
workspace(
|
||||
name = "angular",
|
||||
managed_directories = {"@npm": ["node_modules"]},
|
||||
)
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
# Fetch rules_nodejs so we can install our npm dependencies
|
||||
http_archive(
|
||||
name = "build_bazel_rules_nodejs",
|
||||
sha256 = "b6670f9f43faa66e3009488bbd909bc7bc46a5a9661a33f6bc578068d1837f37",
|
||||
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.3.0/rules_nodejs-1.3.0.tar.gz"],
|
||||
)
|
||||
|
||||
# Check the bazel version and download npm dependencies
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "check_bazel_version", "check_rules_nodejs_version", "node_repositories", "yarn_install")
|
||||
|
||||
# Bazel version must be at least the following version because:
|
||||
# - 0.26.0 managed_directories feature added which is required for nodejs rules 0.30.0
|
||||
# - 0.27.0 has a fix for managed_directories after `rm -rf node_modules`
|
||||
# - 2.1.0 feature added to honor .bazelignore in external repositories
|
||||
check_bazel_version(
|
||||
message = """
|
||||
You no longer need to install Bazel on your machine.
|
||||
Angular has a dependency on the @bazel/bazel package which supplies it.
|
||||
Try running `yarn bazel` instead.
|
||||
(If you did run that, check that you've got a fresh `yarn install`)
|
||||
|
||||
""",
|
||||
minimum_bazel_version = "2.1.0",
|
||||
)
|
||||
|
||||
check_rules_nodejs_version(minimum_version_string = "1.3.0")
|
||||
|
||||
# Setup the Node.js toolchain
|
||||
node_repositories(
|
||||
node_repositories = {
|
||||
"12.14.1-darwin_amd64": ("node-v12.14.1-darwin-x64.tar.gz", "node-v12.14.1-darwin-x64", "0be10a28737527a1e5e3784d3ad844d742fe8b0718acd701fd48f718fd3af78f"),
|
||||
"12.14.1-linux_amd64": ("node-v12.14.1-linux-x64.tar.xz", "node-v12.14.1-linux-x64", "07cfcaa0aa9d0fcb6e99725408d9e0b07be03b844701588e3ab5dbc395b98e1b"),
|
||||
"12.14.1-windows_amd64": ("node-v12.14.1-win-x64.zip", "node-v12.14.1-win-x64", "1f96ccce3ba045ecea3f458e189500adb90b8bc1a34de5d82fc10a5bf66ce7e3"),
|
||||
},
|
||||
node_version = "12.14.1",
|
||||
package_json = ["//:package.json"],
|
||||
)
|
||||
|
||||
load("//integration:angular_integration_test.bzl", "npm_package_archives")
|
||||
|
||||
yarn_install(
|
||||
name = "npm",
|
||||
manual_build_file_contents = npm_package_archives(),
|
||||
package_json = "//:package.json",
|
||||
yarn_lock = "//:yarn.lock",
|
||||
)
|
||||
|
||||
# Install all bazel dependencies of the @npm npm packages
|
||||
load("@npm//:install_bazel_dependencies.bzl", "install_bazel_dependencies")
|
||||
|
||||
install_bazel_dependencies()
|
||||
|
||||
# Load angular dependencies
|
||||
load("//packages/bazel:package.bzl", "rules_angular_dev_dependencies")
|
||||
|
||||
rules_angular_dev_dependencies()
|
||||
|
||||
# Load protractor dependencies
|
||||
load("@npm_bazel_protractor//:package.bzl", "npm_bazel_protractor_dependencies")
|
||||
|
||||
npm_bazel_protractor_dependencies()
|
||||
|
||||
# Load karma dependencies
|
||||
load("@npm_bazel_karma//:package.bzl", "npm_bazel_karma_dependencies")
|
||||
|
||||
npm_bazel_karma_dependencies()
|
||||
|
||||
# Setup the rules_webtesting toolchain
|
||||
load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories")
|
||||
|
||||
web_test_repositories()
|
||||
|
||||
load("//tools/browsers:browser_repositories.bzl", "browser_repositories")
|
||||
|
||||
browser_repositories()
|
||||
|
||||
# Setup the rules_typescript toolchain
|
||||
load("@npm_bazel_typescript//:index.bzl", "ts_setup_workspace")
|
||||
|
||||
ts_setup_workspace()
|
||||
|
||||
# Setup the rules_sass toolchain
|
||||
load("@io_bazel_rules_sass//sass:sass_repositories.bzl", "sass_repositories")
|
||||
|
||||
sass_repositories()
|
||||
|
||||
# Setup the skydoc toolchain
|
||||
load("@io_bazel_skydoc//skylark:skylark.bzl", "skydoc_repositories")
|
||||
|
||||
skydoc_repositories()
|
||||
|
||||
load("@bazel_toolchains//rules:environments.bzl", "clang_env")
|
||||
load("@bazel_toolchains//rules:rbe_repo.bzl", "rbe_autoconfig")
|
||||
|
||||
rbe_autoconfig(
|
||||
name = "rbe_ubuntu1604_angular",
|
||||
# Need to specify a base container digest in order to ensure that we can use the checked-in
|
||||
# platform configurations for the "ubuntu16_04" image. Otherwise the autoconfig rule would
|
||||
# need to pull the image and run it in order to determine the toolchain configuration. See:
|
||||
# https://github.com/bazelbuild/bazel-toolchains/blob/1.1.2/configs/ubuntu16_04_clang/versions.bzl
|
||||
base_container_digest = "sha256:1ab40405810effefa0b2f45824d6d608634ccddbf06366760c341ef6fbead011",
|
||||
# Note that if you change the `digest`, you might also need to update the
|
||||
# `base_container_digest` to make sure marketplace.gcr.io/google/rbe-ubuntu16-04-webtest:<digest>
|
||||
# and marketplace.gcr.io/google/rbe-ubuntu16-04:<base_container_digest> have
|
||||
# the same Clang and JDK installed. Clang is needed because of the dependency on
|
||||
# @com_google_protobuf. Java is needed for the Bazel's test executor Java tool.
|
||||
digest = "sha256:0b8fa87db4b8e5366717a7164342a029d1348d2feea7ecc4b18c780bc2507059",
|
||||
env = clang_env(),
|
||||
registry = "marketplace.gcr.io",
|
||||
# We can't use the default "ubuntu16_04" RBE image provided by the autoconfig because we need
|
||||
# a specific Linux kernel that comes with "libx11" in order to run headless browser tests.
|
||||
repository = "google/rbe-ubuntu16-04-webtest",
|
||||
)
|
48
aio/.gitignore
vendored
@ -1,48 +0,0 @@
|
||||
# See http://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# compiled output
|
||||
/dist
|
||||
/out-tsc
|
||||
/src/generated
|
||||
/tmp
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
|
||||
# IDEs and editors
|
||||
/.idea
|
||||
.project
|
||||
.classpath
|
||||
.c9/
|
||||
*.launch
|
||||
.settings/
|
||||
*.sublime-workspace
|
||||
|
||||
# IDE - VSCode
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
|
||||
# misc
|
||||
/.firebase/
|
||||
/.sass-cache
|
||||
/connect.lock
|
||||
/coverage
|
||||
/libpeerconnection.log
|
||||
debug.log
|
||||
firebase-debug.log
|
||||
npm-debug.log
|
||||
testem.log
|
||||
/typings
|
||||
yarn-error.log
|
||||
|
||||
# e2e
|
||||
/e2e/*.js
|
||||
/e2e/*.map
|
||||
protractor-results*.txt
|
||||
|
||||
# System Files
|
||||
.DS_Store
|
||||
Thumbs.db
|
140
aio/README.md
@ -1,140 +0,0 @@
|
||||
# Angular documentation project (https://angular.io)
|
||||
|
||||
Everything in this folder is part of the documentation project. This includes
|
||||
|
||||
* the web site for displaying the documentation
|
||||
* the dgeni configuration for converting source files to rendered files that can be viewed in the web site.
|
||||
* the tooling for setting up examples for development, and for generating live-example and zip files from the examples.
|
||||
|
||||
## Developer tasks
|
||||
|
||||
We use [Yarn](https://yarnpkg.com) to manage the dependencies and to run build tasks.
|
||||
You should run all these tasks from the `angular/aio` folder.
|
||||
Here are the most important tasks you might need to use:
|
||||
|
||||
* `yarn` - install all the dependencies.
|
||||
* `yarn setup` - install all the dependencies, boilerplate, stackblitz, zips and run dgeni on the docs.
|
||||
* `yarn setup-local` - same as `setup`, but build the Angular packages from the source code and use these locally built versions (instead of the ones fetched from npm) for aio and docs examples boilerplate.
|
||||
|
||||
* `yarn build` - create a production build of the application (after installing dependencies, boilerplate, etc).
|
||||
* `yarn build-local` - same as `build`, but use `setup-local` instead of `setup`.
|
||||
* `yarn build-local-with-viewengine` - same as `build-local`, but in addition also turns on `ViewEngine` mode in aio.
|
||||
(Note: Docs examples run in `ViewEngine` mode by default. To turn on `ivy` mode in examples, see `yarn boilerplate:add` below.)
|
||||
|
||||
* `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
|
||||
* `yarn serve-and-sync` - run both the `docs-watch` and `start` in the same console.
|
||||
* `yarn lint` - check that the doc-viewer code follows our style rules.
|
||||
* `yarn test` - watch all the doc-viewer source files and re-run all the unit tests when any of them change.
|
||||
* `yarn test --watch=false` - run all the unit tests once.
|
||||
* `yarn e2e` - run all the e2e tests for the doc-viewer.
|
||||
|
||||
* `yarn docs` - generate all the docs from the source files.
|
||||
* `yarn docs-watch` - watch the Angular source and the docs files and run a short-circuited doc-gen for the docs that changed.
|
||||
* `yarn docs-lint` - check that the doc gen code follows our style rules.
|
||||
* `yarn docs-test` - run the unit tests for the doc generation code.
|
||||
|
||||
* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally.
|
||||
* `yarn boilerplate:add:ivy` - same as `boilerplate:add` but also turns on `ivy` mode.
|
||||
|
||||
* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
|
||||
* `yarn generate-stackblitz` - generate the stackblitz files that are used by the `live-example` tags in the docs.
|
||||
* `yarn generate-zips` - generate the zip files from the examples. Zips are available via the `live-example` tags in the docs.
|
||||
|
||||
* `yarn example-e2e` - run all e2e tests for examples. Available options:
|
||||
- `--setup`: generate boilerplate, force webdriver update & other setup, then run tests.
|
||||
- `--local`: run e2e tests with the local version of Angular contained in the "dist" folder.
|
||||
_Requires `--setup` in order to take effect._
|
||||
- `--ivy`: run e2e tests in `ivy` mode.
|
||||
- `--filter=foo`: limit e2e tests to those containing the word "foo".
|
||||
|
||||
> **Note for Windows users**
|
||||
>
|
||||
> Setting up the examples involves creating some [symbolic links](https://en.wikipedia.org/wiki/Symbolic_link) (see [here](./tools/examples/README.md#symlinked-node_modules) for details). On Windows, this requires you either to have [Developer Mode enabled](https://blogs.windows.com/windowsdeveloper/2016/12/02/symlinks-windows-10) (supported on Windows 10 or newer) or to run the setup commands as administrator.
|
||||
>
|
||||
> The affected commands are:
|
||||
> - `yarn setup` / `yarn setup-*`
|
||||
> - `yarn build` / `yarn build-*`
|
||||
> - `yarn boilerplate:add`
|
||||
> - `yarn example-e2e --setup`
|
||||
|
||||
## Using ServiceWorker locally
|
||||
|
||||
Running `yarn start` (even when explicitly targeting production mode) does not set up the
|
||||
ServiceWorker. If you want to test the ServiceWorker locally, you can use `yarn build` and then
|
||||
serve the files in `dist/` with `yarn http-server dist -p 4200`.
|
||||
|
||||
|
||||
## Guide to authoring
|
||||
|
||||
There are two types of content in the documentation:
|
||||
|
||||
* **API docs**: descriptions of the modules, classes, interfaces, decorators, etc that make up the Angular platform.
|
||||
API docs are generated directly from the source code.
|
||||
The source code is contained in TypeScript files, located in the `angular/packages` folder.
|
||||
Each API item may have a preceding comment, which contains JSDoc style tags and content.
|
||||
The content is written in markdown.
|
||||
|
||||
* **Other content**: guides, tutorials, and other marketing material.
|
||||
All other content is written using markdown in text files, located in the `angular/aio/content` folder.
|
||||
More specifically, there are sub-folders that contain particular types of content: guides, tutorial and marketing.
|
||||
|
||||
* **Code examples**: code examples need to be testable to ensure their accuracy.
|
||||
Also, our examples have a specific look and feel and allow the user to copy the source code. For larger
|
||||
examples they are rendered in a tabbed interface (e.g. template, HTML, and TypeScript on separate
|
||||
tabs). Additionally, some are live examples, which provide links where the code can be edited, executed, and/or downloaded. For details on working with code examples, please read the [Code snippets](https://angular.io/guide/docs-style-guide#code-snippets), [Source code markup](https://angular.io/guide/docs-style-guide#source-code-markup), and [Live examples](https://angular.io/guide/docs-style-guide#live-examples) pages of the [Authors Style Guide](https://angular.io/guide/docs-style-guide).
|
||||
|
||||
We use the [dgeni](https://github.com/angular/dgeni) tool to convert these files into docs that can be viewed in the doc-viewer.
|
||||
|
||||
The [Authors Style Guide](https://angular.io/guide/docs-style-guide) prescribes guidelines for
|
||||
writing guide pages, explains how to use the documentation classes and components, and how to mark up sample source code to produce code snippets.
|
||||
|
||||
### Generating the complete docs
|
||||
|
||||
The main task for generating the docs is `yarn docs`. This will process all the source files (API and other),
|
||||
extracting the documentation and generating JSON files that can be consumed by the doc-viewer.
|
||||
|
||||
### Partial doc generation for editors
|
||||
|
||||
Full doc generation can take up to one minute. That's too slow for efficient document creation and editing.
|
||||
|
||||
You can make small changes in a smart editor that displays formatted markdown:
|
||||
>In VS Code, _Cmd-K, V_ opens markdown preview in side pane; _Cmd-B_ toggles left sidebar
|
||||
|
||||
You also want to see those changes displayed properly in the doc viewer
|
||||
with a quick, edit/view cycle time.
|
||||
|
||||
For this purpose, use the `yarn docs-watch` task, which watches for changes to source files and only
|
||||
re-processes the files necessary to generate the docs that are related to the file that has changed.
|
||||
Since this task takes shortcuts, it is much faster (often less than 1 second) but it won't produce full-fidelity
content. In particular, links to other docs and embedded code examples may not always render
correctly.
|
||||
|
||||
The general setup is as follows:
|
||||
|
||||
* Open a terminal, ensure the dependencies are installed; run an initial doc generation; then start the doc-viewer:
|
||||
|
||||
```bash
|
||||
yarn setup
|
||||
yarn start
|
||||
```
|
||||
|
||||
* Open a second terminal and start watching the docs
|
||||
|
||||
```bash
|
||||
yarn docs-watch
|
||||
```
|
||||
|
||||
>Alternatively, try the consolidated `serve-and-sync` command that builds, watches and serves in the same terminal window
|
||||
```bash
|
||||
yarn serve-and-sync
|
||||
```
|
||||
|
||||
* Open a browser at https://localhost:4200/ and navigate to the document on which you want to work.
|
||||
You can automatically open the browser by using `yarn start -o` in the first terminal.
|
||||
|
||||
* Make changes to the page's associated doc or example files. Every time a file is saved, the doc will
|
||||
be regenerated, the app will rebuild and the page will reload.
|
||||
|
||||
* If you get a build error complaining about examples or any other odd behavior, be sure to consult
|
||||
the [Authors Style Guide](https://angular.io/guide/docs-style-guide).
|
@ -1,3 +0,0 @@
|
||||
scripts-js/lib
|
||||
scripts-js/node_modules
|
||||
scripts-js/**/test
|
@ -1,175 +0,0 @@
|
||||
# Image metadata and config
|
||||
FROM debian:stretch
|
||||
|
||||
LABEL name="angular.io PR preview" \
|
||||
description="This image implements the PR preview functionality for angular.io." \
|
||||
vendor="Angular" \
|
||||
version="1.0"
|
||||
|
||||
VOLUME /aio-secrets
|
||||
VOLUME /var/www/aio-builds
|
||||
VOLUME /dockerbuild
|
||||
|
||||
EXPOSE 80 443
|
||||
|
||||
|
||||
# Build-time args and env vars
|
||||
# The AIO_ARTIFACT_PATH path needs to be kept in sync with the value of
|
||||
# `aio_preview->steps->store_artifacts->destination` property in `.circleci/config.yml`
|
||||
ARG AIO_ARTIFACT_PATH=aio/dist/aio-snapshot.tgz
|
||||
ARG TEST_AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH
|
||||
ARG AIO_BUILDS_DIR=/var/www/aio-builds
|
||||
ARG TEST_AIO_BUILDS_DIR=/tmp/aio-builds
|
||||
ARG AIO_DOMAIN_NAME=ngbuilds.io
|
||||
ARG TEST_AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME.localhost
|
||||
ARG AIO_GITHUB_ORGANIZATION=angular
|
||||
ARG TEST_AIO_GITHUB_ORGANIZATION=test-org
|
||||
ARG AIO_GITHUB_REPO=angular
|
||||
ARG TEST_AIO_GITHUB_REPO=test-repo
|
||||
ARG AIO_GITHUB_TEAM_SLUGS=aio-auto-previews,aio-contributors
|
||||
ARG TEST_AIO_GITHUB_TEAM_SLUGS=test-team-1,test-team-2
|
||||
ARG AIO_NGINX_HOSTNAME=$AIO_DOMAIN_NAME
|
||||
ARG TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_DOMAIN_NAME
|
||||
ARG AIO_NGINX_PORT_HTTP=80
|
||||
ARG TEST_AIO_NGINX_PORT_HTTP=8080
|
||||
ARG AIO_NGINX_PORT_HTTPS=443
|
||||
ARG TEST_AIO_NGINX_PORT_HTTPS=4433
|
||||
ARG AIO_SIGNIFICANT_FILES_PATTERN='^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)'
|
||||
ARG TEST_AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN
|
||||
ARG AIO_TRUSTED_PR_LABEL="aio: preview"
|
||||
ARG TEST_AIO_TRUSTED_PR_LABEL="aio: preview"
|
||||
ARG AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
|
||||
ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
|
||||
ARG AIO_ARTIFACT_MAX_SIZE=26214400
|
||||
ARG TEST_AIO_ARTIFACT_MAX_SIZE=200
|
||||
ARG AIO_PREVIEW_SERVER_PORT=3000
|
||||
ARG TEST_AIO_PREVIEW_SERVER_PORT=3001
|
||||
|
||||
ENV AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH TEST_AIO_ARTIFACT_PATH=$TEST_AIO_ARTIFACT_PATH \
|
||||
AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
|
||||
AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
|
||||
AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
|
||||
AIO_GITHUB_REPO=$AIO_GITHUB_REPO TEST_AIO_GITHUB_REPO=$TEST_AIO_GITHUB_REPO \
|
||||
AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
|
||||
AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
|
||||
AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
|
||||
AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
|
||||
AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
|
||||
AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
|
||||
AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
|
||||
AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
|
||||
AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN TEST_AIO_SIGNIFICANT_FILES_PATTERN=$TEST_AIO_SIGNIFICANT_FILES_PATTERN \
|
||||
AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
|
||||
AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME TEST_AIO_PREVIEW_SERVER_HOSTNAME=$TEST_AIO_PREVIEW_SERVER_HOSTNAME \
|
||||
AIO_ARTIFACT_MAX_SIZE=$AIO_ARTIFACT_MAX_SIZE TEST_AIO_ARTIFACT_MAX_SIZE=$TEST_AIO_ARTIFACT_MAX_SIZE \
|
||||
AIO_PREVIEW_SERVER_PORT=$AIO_PREVIEW_SERVER_PORT TEST_AIO_PREVIEW_SERVER_PORT=$TEST_AIO_PREVIEW_SERVER_PORT \
|
||||
AIO_WWW_USER=www-data \
|
||||
NODE_ENV=production
|
||||
|
||||
|
||||
# Create directory for logs
|
||||
RUN mkdir /var/log/aio
|
||||
|
||||
|
||||
# Add extra package sources
|
||||
RUN apt-get update -y && apt-get install -y curl
|
||||
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_10.x | bash -
|
||||
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
|
||||
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
|
||||
RUN echo "deb http://ftp.debian.org/debian stretch-backports main" | tee /etc/apt/sources.list.d/backports.list
|
||||
|
||||
|
||||
# Install packages
|
||||
RUN apt-get update -y && apt-get install -y \
|
||||
cron=3.0pl1-128+deb9u1 \
|
||||
dnsmasq=2.76-5+deb9u2 \
|
||||
nano=2.7.4-1 \
|
||||
nginx=1.10.3-1+deb9u2 \
|
||||
nodejs=10.15.3-1nodesource1 \
|
||||
openssl=1.1.0j-1~deb9u1 \
|
||||
rsyslog=8.24.0-1 \
|
||||
yarn=1.15.2-1
|
||||
RUN yarn global add pm2@3.5.0
|
||||
|
||||
|
||||
# Set up log rotation
|
||||
COPY logrotate/* /etc/logrotate.d/
|
||||
RUN chmod 0644 /etc/logrotate.d/*
|
||||
|
||||
|
||||
# Set up cronjobs
|
||||
COPY cronjobs/aio-builds-cleanup /etc/cron.d/
|
||||
RUN chmod 0744 /etc/cron.d/aio-builds-cleanup
|
||||
RUN crontab /etc/cron.d/aio-builds-cleanup
|
||||
RUN printenv | grep AIO_ >> /etc/environment
|
||||
|
||||
|
||||
# Set up dnsmasq
|
||||
COPY dnsmasq/dnsmasq.conf /etc/
|
||||
RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
RUN sed -i "s|{{\$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
|
||||
|
||||
|
||||
# Set up SSL/TLS certificates
|
||||
COPY nginx/create-selfsigned-cert.sh /tmp/
|
||||
RUN chmod a+x /tmp/create-selfsigned-cert.sh
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-prod" "$AIO_NGINX_HOSTNAME" "$AIO_LOCALCERTS_DIR"
|
||||
RUN /tmp/create-selfsigned-cert.sh "selfcert-test" "$TEST_AIO_NGINX_HOSTNAME" "$TEST_AIO_LOCALCERTS_DIR"
|
||||
RUN rm /tmp/create-selfsigned-cert.sh
|
||||
RUN update-ca-certificates
|
||||
|
||||
|
||||
# Set up nginx (for production and testing)
|
||||
RUN sed -i -E "s|^user\s+\S+;|user $AIO_WWW_USER;|" /etc/nginx/nginx.conf
|
||||
RUN rm -f /etc/nginx/conf.d/*
|
||||
RUN rm -f /etc/nginx/sites-enabled/*
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf
|
||||
|
||||
COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$TEST_AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$TEST_AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$TEST_AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf
|
||||
|
||||
|
||||
# Set up pm2
|
||||
RUN pm2 startup --user root > /dev/null
|
||||
|
||||
|
||||
# Set up the shell scripts
|
||||
COPY scripts-sh/ $AIO_SCRIPTS_SH_DIR/
|
||||
RUN chmod a+x $AIO_SCRIPTS_SH_DIR/*
|
||||
RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \
|
||||
| while read file; do ln -s $AIO_SCRIPTS_SH_DIR/$file /usr/local/bin/aio-${file%.*}; done
|
||||
|
||||
|
||||
# Set up the Node.js scripts
|
||||
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
|
||||
WORKDIR $AIO_SCRIPTS_JS_DIR/
|
||||
RUN yarn install --production --frozen-lockfile
|
||||
|
||||
|
||||
# Set up health check
|
||||
HEALTHCHECK --interval=5m CMD /usr/local/bin/aio-health-check
|
||||
|
||||
|
||||
# Go!
|
||||
WORKDIR /
|
||||
CMD aio-init && tail -f /dev/null
|
@ -1,2 +0,0 @@
|
||||
# Periodically clean up builds that do not correspond to currently open PRs
|
||||
0 12 * * * /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1
|
@ -1,16 +0,0 @@
|
||||
# Do not read /etc/resolv.conf. Get servers from this file instead.
|
||||
no-resolv
|
||||
server=8.8.8.8
|
||||
server=8.8.4.4
|
||||
|
||||
# Listen for DHCP and DNS requests only on this address.
|
||||
listen-address=127.0.0.1
|
||||
|
||||
# Force an IP address for these domains.
|
||||
address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
|
||||
address=/{{$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
|
||||
|
||||
# Run as root (required from inside docker container).
|
||||
user=root
|
@ -1,9 +0,0 @@
|
||||
/var/log/aio/clean-up.log /var/log/aio/init.log /var/log/aio/verify-setup.log {
|
||||
compress
|
||||
create
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
/var/log/aio/nginx/*.log /var/log/aio/nginx-test/*.log {
|
||||
compress
|
||||
create
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
sharedscripts
|
||||
postrotate
|
||||
service nginx rotate >/dev/null 2>&1
|
||||
endscript
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
/var/log/aio/preview-server-*.log {
|
||||
compress
|
||||
copytruncate
|
||||
delaycompress
|
||||
missingok
|
||||
monthly
|
||||
notifempty
|
||||
rotate 6
|
||||
}
|
@ -1,123 +0,0 @@
|
||||
# Redirect all HTTP traffic to HTTPS
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTP}} default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTP}};
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Ideally we want 308 (permanent + keep original method),
|
||||
# but it is relatively new and not supported by some clients (e.g. cURL).
|
||||
return 307 https://$host:{{$AIO_NGINX_PORT_HTTPS}}$request_uri;
|
||||
}
|
||||
|
||||
# Serve PR-preview requests
|
||||
server {
|
||||
server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{7,40})\.";
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
|
||||
|
||||
root {{$AIO_BUILDS_DIR}}/$pr/$sha;
|
||||
disable_symlinks on from=$document_root;
|
||||
index index.html;
|
||||
|
||||
gzip on;
|
||||
gzip_comp_level 7;
|
||||
gzip_types *;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
error_page 404 /404.html;
|
||||
location "=/404.html" {
|
||||
internal;
|
||||
}
|
||||
|
||||
location "~/[^/]+\.[^/]+$" {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html =404;
|
||||
}
|
||||
}
|
||||
|
||||
# Handle all other requests
|
||||
server {
|
||||
server_name _;
|
||||
|
||||
listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2 default_server;
|
||||
listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;
|
||||
|
||||
ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
|
||||
ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
|
||||
|
||||
access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
|
||||
error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
|
||||
|
||||
# Health check
|
||||
location "~^/health-check/?$" {
|
||||
add_header Content-Type text/plain;
|
||||
return 200 '';
|
||||
}
|
||||
|
||||
# Check PRs previewability
|
||||
location "~^/can-have-public-preview/\d+/?$" {
|
||||
if ($request_method != "GET") {
|
||||
add_header Allow "GET";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method GET;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Notify about CircleCI builds
|
||||
location "~^/circle-build/?$" {
|
||||
if ($request_method != "POST") {
|
||||
add_header Allow "POST";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method POST;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Notify about PR changes
|
||||
location "~^/pr-updated/?$" {
|
||||
if ($request_method != "POST") {
|
||||
add_header Allow "POST";
|
||||
return 405;
|
||||
}
|
||||
|
||||
proxy_pass_request_headers on;
|
||||
proxy_redirect off;
|
||||
proxy_method POST;
|
||||
proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
|
||||
|
||||
resolver 127.0.0.1;
|
||||
}
|
||||
|
||||
# Everything else
|
||||
location / {
|
||||
return 404;
|
||||
}
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -eu -o pipefail
|
||||
|
||||
|
||||
# Variables
|
||||
confFile=/tmp/$1.conf
|
||||
domainName=$2
|
||||
outDir=$3
|
||||
|
||||
|
||||
# Create certificate
|
||||
cp /etc/ssl/openssl.cnf "$confFile"
|
||||
echo "[subjectAltName]" >> "$confFile"
|
||||
echo "subjectAltName = DNS:$domainName, DNS:*.$domainName" >> "$confFile"
|
||||
mkdir -p $outDir
|
||||
openssl req -days 365 -newkey rsa:2048 -nodes -sha256 -x509 \
|
||||
-config "$confFile" -extensions subjectAltName -subj "/CN=$domainName" \
|
||||
-out "$outDir/$domainName.crt" -keyout "$outDir/$domainName.key"
|
||||
chmod -R 400 "$outDir"
|
||||
cp "$outDir/$domainName.crt" /usr/local/share/ca-certificates
|
@ -1,2 +0,0 @@
|
||||
/dist
|
||||
/node_modules
|
@ -1,127 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {GithubApi} from '../common/github-api';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {assertNotMissingOrEmpty, getPrInfoFromDownloadPath, Logger} from '../common/utils';
|
||||
|
||||
// Classes
|
||||
export class BuildCleaner {
|
||||
|
||||
private logger = new Logger('BuildCleaner');
|
||||
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string, protected githubOrg: string, protected githubRepo: string,
|
||||
protected githubToken: string, protected downloadsDir: string, protected artifactPath: string) {
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
assertNotMissingOrEmpty('downloadsDir', downloadsDir);
|
||||
assertNotMissingOrEmpty('artifactPath', artifactPath);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public async cleanUp(): Promise<void> {
|
||||
try {
|
||||
this.logger.log('Cleaning up builds and downloads');
|
||||
const openPrs = await this.getOpenPrNumbers();
|
||||
this.logger.log(`Open pull requests: ${openPrs.length}`);
|
||||
await Promise.all([
|
||||
this.cleanBuilds(openPrs),
|
||||
this.cleanDownloads(openPrs),
|
||||
]);
|
||||
} catch (error) {
|
||||
this.logger.error('ERROR:', error);
|
||||
}
|
||||
}
|
||||
|
||||
public async cleanBuilds(openPrs: number[]): Promise<void> {
|
||||
const existingBuilds = await this.getExistingBuildNumbers();
|
||||
await this.removeUnnecessaryBuilds(existingBuilds, openPrs);
|
||||
}
|
||||
|
||||
public async cleanDownloads(openPrs: number[]): Promise<void> {
|
||||
const existingDownloads = await this.getExistingDownloads();
|
||||
await this.removeUnnecessaryDownloads(existingDownloads, openPrs);
|
||||
}
|
||||
|
||||
public getExistingBuildNumbers(): Promise<number[]> {
|
||||
return new Promise<number[]>((resolve, reject) => {
|
||||
fs.readdir(this.buildsDir, (err, files) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
const buildNumbers = files.
|
||||
map(name => name.replace(HIDDEN_DIR_PREFIX, '')). // Remove the "hidden dir" prefix
|
||||
map(Number). // Convert string to number
|
||||
filter(Boolean); // Ignore NaN (or 0), because they are not builds
|
||||
|
||||
resolve(buildNumbers);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public async getOpenPrNumbers(): Promise<number[]> {
|
||||
const api = new GithubApi(this.githubToken);
|
||||
const githubPullRequests = new GithubPullRequests(api, this.githubOrg, this.githubRepo);
|
||||
const prs = await githubPullRequests.fetchAll('open');
|
||||
return prs.map(pr => pr.number);
|
||||
}
|
||||
|
||||
public removeDir(dir: string): void {
|
||||
try {
|
||||
if (shell.test('-d', dir)) {
|
||||
shell.chmod('-R', 'a+w', dir);
|
||||
shell.rm('-rf', dir);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(`ERROR: Unable to remove '${dir}' due to:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
public removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]): void {
|
||||
const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));
|
||||
|
||||
this.logger.log(`Existing builds: ${existingBuildNumbers.length}`);
|
||||
this.logger.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
|
||||
|
||||
// Try removing public dirs.
|
||||
toRemove.
|
||||
map(num => path.join(this.buildsDir, String(num))).
|
||||
forEach(dir => this.removeDir(dir));
|
||||
|
||||
// Try removing hidden dirs.
|
||||
toRemove.
|
||||
map(num => path.join(this.buildsDir, HIDDEN_DIR_PREFIX + String(num))).
|
||||
forEach(dir => this.removeDir(dir));
|
||||
}
|
||||
|
||||
public getExistingDownloads(): Promise<string[]> {
|
||||
const artifactFile = path.basename(this.artifactPath);
|
||||
return new Promise<string[]>((resolve, reject) => {
|
||||
fs.readdir(this.downloadsDir, (err, files) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
files = files.filter(file => file.endsWith(artifactFile));
|
||||
resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public removeUnnecessaryDownloads(existingDownloads: string[], openPrNumbers: number[]): void {
|
||||
const toRemove = existingDownloads.filter(filePath => {
|
||||
const {pr} = getPrInfoFromDownloadPath(filePath);
|
||||
return !openPrNumbers.includes(pr);
|
||||
});
|
||||
|
||||
this.logger.log(`Existing downloads: ${existingDownloads.length}`);
|
||||
this.logger.log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);
|
||||
|
||||
toRemove.forEach(filePath => shell.rm(path.join(this.downloadsDir, filePath)));
|
||||
}
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {
|
||||
AIO_ARTIFACT_PATH,
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TOKEN,
|
||||
} from '../common/env-variables';
|
||||
import {BuildCleaner} from './build-cleaner';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main(): void {
|
||||
const buildCleaner = new BuildCleaner(
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TOKEN,
|
||||
AIO_DOWNLOADS_DIR,
|
||||
AIO_ARTIFACT_PATH);
|
||||
|
||||
buildCleaner.cleanUp().catch(() => process.exit(1));
|
||||
}
|
@ -1,90 +0,0 @@
|
||||
// Imports
|
||||
import fetch from 'node-fetch';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
// Constants
|
||||
const CIRCLE_CI_API_URL = 'https://circleci.com/api/v1.1/project/github';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface ArtifactInfo {
|
||||
path: string;
|
||||
pretty_path: string;
|
||||
node_index: number;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export type ArtifactResponse = ArtifactInfo[];
|
||||
|
||||
export interface BuildInfo {
|
||||
reponame: string;
|
||||
failed: boolean;
|
||||
branch: string;
|
||||
username: string;
|
||||
build_num: number;
|
||||
has_artifacts: boolean;
|
||||
outcome: string; // e.g. 'success'
|
||||
vcs_revision: string; // HEAD SHA
|
||||
// there are other fields but they are not used in this code
|
||||
}
|
||||
|
||||
/**
|
||||
* A Helper that can interact with the CircleCI API.
|
||||
*/
|
||||
export class CircleCiApi {
|
||||
|
||||
private tokenParam = `circle-token=${this.circleCiToken}`;
|
||||
|
||||
/**
|
||||
* Construct a helper that can interact with the CircleCI REST API.
|
||||
* @param githubOrg The Github organisation whose repos we want to access in CircleCI (e.g. angular).
|
||||
* @param githubRepo The Github repo whose builds we want to access in CircleCI (e.g. angular).
|
||||
* @param circleCiToken The CircleCI API access token (secret).
|
||||
*/
|
||||
constructor(
|
||||
private githubOrg: string,
|
||||
private githubRepo: string,
|
||||
private circleCiToken: string,
|
||||
) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
assertNotMissingOrEmpty('circleCiToken', circleCiToken);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the info for a build from the CircleCI API
|
||||
* @param buildNumber The number of the CircleCI build for which to fetch info.
|
||||
* @returns A promise to the info about the build
|
||||
*/
|
||||
public async getBuildInfo(buildNumber: number): Promise<BuildInfo> {
|
||||
try {
|
||||
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
|
||||
const response = await fetch(`${baseUrl}?${this.tokenParam}`);
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`${baseUrl}: ${response.status} - ${response.statusText}`);
|
||||
}
|
||||
return response.json();
|
||||
} catch (error) {
|
||||
throw new Error(`CircleCI build info request failed (${error.message})`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Query the CircleCI API to get a URL for a specified artifact from a specified build.
|
||||
* @param artifactPath The path, within the build, to the artifact.
|
||||
* @returns A promise to the URL that can be requested to download the actual build artifact file.
|
||||
*/
|
||||
public async getBuildArtifactUrl(buildNumber: number, artifactPath: string): Promise<string> {
|
||||
const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/artifacts?${this.tokenParam}`);
|
||||
const artifacts = await response.json() as ArtifactResponse;
|
||||
const artifact = artifacts.find(item => item.path === artifactPath);
|
||||
if (!artifact) {
|
||||
throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNumber}`);
|
||||
}
|
||||
return artifact.url;
|
||||
} catch (error) {
|
||||
throw new Error(`CircleCI artifact URL request failed (${error.message})`);
|
||||
}
|
||||
}
|
||||
}
|
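As a rough orientation for this helper, the following sketch (not part of the original sources) shows how it might be used; the token, build number, artifact path and import path are all placeholders:

// Illustrative only: fetch build info and an artifact URL for a hypothetical build.
import {CircleCiApi} from './common/circle-ci-api';

async function example(): Promise<void> {
  const api = new CircleCiApi('angular', 'angular', process.env.AIO_CIRCLE_CI_TOKEN || '');
  const info = await api.getBuildInfo(12345);  // hypothetical build number
  console.log(info.outcome, info.vcs_revision);
  const url = await api.getBuildArtifactUrl(12345, 'aio/dist/aio-snapshot.tgz');  // hypothetical artifact path
  console.log(url);
}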
@ -1,4 +0,0 @@
// Constants
export const AIO_DOWNLOADS_DIR = '/tmp/aio-downloads';
export const HIDDEN_DIR_PREFIX = 'hidden--';
export const SHORT_SHA_LEN = 7;
@ -1,19 +0,0 @@
|
||||
import {getEnvVar} from './utils';
|
||||
|
||||
export const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
|
||||
export const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
|
||||
export const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
export const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
|
||||
export const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
|
||||
export const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
export const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
|
||||
export const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
|
||||
export const AIO_NGINX_HOSTNAME = getEnvVar('AIO_NGINX_HOSTNAME');
|
||||
export const AIO_NGINX_PORT_HTTP = +getEnvVar('AIO_NGINX_PORT_HTTP');
|
||||
export const AIO_NGINX_PORT_HTTPS = +getEnvVar('AIO_NGINX_PORT_HTTPS');
|
||||
export const AIO_SIGNIFICANT_FILES_PATTERN = getEnvVar('AIO_SIGNIFICANT_FILES_PATTERN');
|
||||
export const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
export const AIO_PREVIEW_SERVER_HOSTNAME = getEnvVar('AIO_PREVIEW_SERVER_HOSTNAME');
|
||||
export const AIO_PREVIEW_SERVER_PORT = +getEnvVar('AIO_PREVIEW_SERVER_PORT');
|
||||
export const AIO_ARTIFACT_MAX_SIZE = +getEnvVar('AIO_ARTIFACT_MAX_SIZE');
|
||||
export const AIO_WWW_USER = getEnvVar('AIO_WWW_USER');
|
@ -1,111 +0,0 @@
|
||||
// Imports
|
||||
import {IncomingMessage} from 'http';
|
||||
import * as https from 'https';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
// Constants
|
||||
const GITHUB_HOSTNAME = 'api.github.com';
|
||||
|
||||
// Interfaces - Types
|
||||
interface RequestParams {
|
||||
[key: string]: string | number;
|
||||
}
|
||||
|
||||
type RequestParamsOrNull = RequestParams | null;
|
||||
|
||||
// Classes
|
||||
export class GithubApi {
|
||||
protected requestHeaders: {[key: string]: string};
|
||||
|
||||
// Constructor
|
||||
constructor(githubToken: string) {
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
|
||||
this.requestHeaders = {
|
||||
'Authorization': `token ${githubToken}`,
|
||||
'User-Agent': `Node/${process.versions.node}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public get<T = any>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('get', path);
|
||||
}
|
||||
|
||||
public post<T = any>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
|
||||
const path = this.buildPath(pathname, params);
|
||||
return this.request<T>('post', path, data);
|
||||
}
|
||||
|
||||
// In GitHub API paginated requests, page numbering is 1-based. (https://developer.github.com/v3/#pagination)
|
||||
public getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 1): Promise<T[]> {
|
||||
const perPage = 100;
|
||||
const params = {
|
||||
...baseParams,
|
||||
page: currentPage,
|
||||
per_page: perPage,
|
||||
};
|
||||
|
||||
return this.get<T[]>(pathname, params).then(items => {
|
||||
if (items.length < perPage) {
|
||||
return items;
|
||||
}
|
||||
|
||||
return this.getPaginated<T>(pathname, baseParams, currentPage + 1).then(moreItems => [...items, ...moreItems]);
|
||||
});
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
|
||||
if (params == null) {
|
||||
return pathname;
|
||||
}
|
||||
|
||||
const search = (params === null) ? '' : this.serializeSearchParams(params);
|
||||
const joiner = search && '?';
|
||||
|
||||
return `${pathname}${joiner}${search}`;
|
||||
}
|
||||
|
||||
protected request<T>(method: string, path: string, data: any = null): Promise<T> {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
const options = {
|
||||
headers: {...this.requestHeaders},
|
||||
host: GITHUB_HOSTNAME,
|
||||
method,
|
||||
path,
|
||||
};
|
||||
|
||||
const onError = (statusCode: number, responseText: string) => {
|
||||
const url = `https://${GITHUB_HOSTNAME}${path}`;
|
||||
reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
|
||||
};
|
||||
const onSuccess = (responseText: string) => {
|
||||
try { resolve(responseText && JSON.parse(responseText)); } catch (err) { reject(err); }
|
||||
};
|
||||
const onResponse = (res: IncomingMessage) => {
|
||||
const statusCode = res.statusCode || -1;
|
||||
const isSuccess = (200 <= statusCode) && (statusCode < 400);
|
||||
let responseText = '';
|
||||
|
||||
res.
|
||||
on('data', d => responseText += d).
|
||||
on('end', () => isSuccess ? onSuccess(responseText) : onError(statusCode, responseText)).
|
||||
on('error', reject);
|
||||
};
|
||||
|
||||
https.
|
||||
request(options, onResponse).
|
||||
on('error', reject).
|
||||
end(data && JSON.stringify(data));
|
||||
});
|
||||
}
|
||||
|
||||
protected serializeSearchParams(params: RequestParams): string {
|
||||
return Object.keys(params).
|
||||
filter(key => params[key] != null).
|
||||
map(key => `${key}=${encodeURIComponent(String(params[key]))}`).
|
||||
join('&');
|
||||
}
|
||||
}
|
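A minimal usage sketch of the paginated helper (not from the original sources; the repo slug, token source and import path are assumptions):

// Illustrative only: list open issues via `getPaginated()`.
import {GithubApi} from './common/github-api';

async function example(): Promise<void> {
  const api = new GithubApi(process.env.AIO_GITHUB_TOKEN || '');
  const issues = await api.getPaginated<{number: number}>('/repos/angular/angular/issues', {state: 'open'});
  console.log(`Fetched ${issues.length} issues.`);
}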
@ -1,79 +0,0 @@
|
||||
import {GithubApi} from './github-api';
|
||||
import {assert, assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
export interface PullRequest {
|
||||
number: number;
|
||||
user: {login: string};
|
||||
labels: {name: string}[];
|
||||
}
|
||||
|
||||
export interface FileInfo {
|
||||
sha: string;
|
||||
filename: string;
|
||||
}
|
||||
|
||||
export type PullRequestState = 'all' | 'closed' | 'open';
|
||||
|
||||
/**
|
||||
* Access pull requests on GitHub.
|
||||
*/
|
||||
export class GithubPullRequests {
|
||||
public repoSlug: string;
|
||||
|
||||
/**
|
||||
* Create an instance of this helper
|
||||
* @param api An instance of the Github API helper.
|
||||
* @param githubOrg The organisation on GitHub whose repo we will interrogate.
|
||||
* @param githubRepo The repository on Github with whose PRs we will interact.
|
||||
*/
|
||||
constructor(private api: GithubApi, githubOrg: string, githubRepo: string) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
assertNotMissingOrEmpty('githubRepo', githubRepo);
|
||||
this.repoSlug = `${githubOrg}/${githubRepo}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a comment on a PR.
|
||||
* @param pr The number of the PR on which to comment.
|
||||
* @param body The body of the comment to post.
|
||||
* @returns A promise that resolves when the comment has been posted.
|
||||
*/
|
||||
public addComment(pr: number, body: string): Promise<any> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
assert(!!body, `Invalid or empty comment body: ${body}`);
|
||||
return this.api.post<any>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about a PR.
|
||||
* @param pr The number of the PR for which to request info.
|
||||
* @returns A promise that resolves with information about the specified PR.
|
||||
*/
|
||||
public fetch(pr: number): Promise<PullRequest> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
// Using the `/issues/` URL, because the `/pulls/` one does not provide labels.
|
||||
return this.api.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about all PRs that match the given state.
|
||||
* @param state Only retrieve PRs that have this state.
|
||||
* @returns A promise that is resolved with information about the requested PRs.
|
||||
*/
|
||||
public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
|
||||
const pathname = `/repos/${this.repoSlug}/pulls`;
|
||||
const params = {state};
|
||||
|
||||
return this.api.getPaginated<PullRequest>(pathname, params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request a list of files for the given PR.
|
||||
* @param pr The number of the PR for which to request files.
|
||||
* @returns A promise that resolves to an array of file information
|
||||
*/
|
||||
public fetchFiles(pr: number): Promise<FileInfo[]> {
|
||||
assert(pr > 0, `Invalid PR number: ${pr}`);
|
||||
return this.api.getPaginated<FileInfo>(`/repos/${this.repoSlug}/pulls/${pr}/files`);
|
||||
}
|
||||
}
|
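For illustration, a small sketch of how this PR helper could be driven (hypothetical PR number and comment; import paths assume the layout shown in this diff):

// Illustrative only: fetch open PRs and comment on one of them.
import {GithubApi} from './common/github-api';
import {GithubPullRequests} from './common/github-pull-requests';

async function example(): Promise<void> {
  const api = new GithubApi(process.env.AIO_GITHUB_TOKEN || '');
  const prs = new GithubPullRequests(api, 'angular', 'angular');
  const openPrs = await prs.fetchAll('open');
  console.log(`Open PRs: ${openPrs.map(pr => pr.number).join(', ')}`);
  await prs.addComment(12345, 'A hypothetical comment.');  // placeholder PR number
}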
@ -1,72 +0,0 @@
|
||||
import {GithubApi} from './github-api';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
export interface Team {
|
||||
id: number;
|
||||
slug: string;
|
||||
}
|
||||
|
||||
export interface TeamMembership {
|
||||
state: string;
|
||||
}
|
||||
|
||||
export class GithubTeams {
|
||||
/**
|
||||
* Create an instance of this helper
|
||||
* @param api An instance of the Github API helper.
|
||||
* @param githubOrg The organisation on GitHub whose repo we will interrogate.
|
||||
*/
|
||||
constructor(private api: GithubApi, protected githubOrg: string) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about all the organisation's teams in GitHub.
|
||||
* @returns A promise that is resolved with information about the teams.
|
||||
*/
|
||||
public fetchAll(): Promise<Team[]> {
|
||||
return this.api.getPaginated<Team>(`/orgs/${this.githubOrg}/teams`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the specified username is a member of the specified team.
|
||||
* @param username The username to check for in the team.
|
||||
* @param teamIds The IDs of the teams in which to look for the username.
|
||||
* @returns a Promise that resolves to `true` if the username is a member of the team.
|
||||
*/
|
||||
public async isMemberById(username: string, teamIds: number[]): Promise<boolean> {
|
||||
|
||||
const getMembership = async (teamId: number) => {
|
||||
try {
|
||||
const {state} = await this.api.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`);
|
||||
return state === 'active';
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
for (const teamId of teamIds) {
|
||||
if (await getMembership(teamId)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the given username is a member of the teams specified by the team slugs.
|
||||
* @param username The username to check for in the teams.
|
||||
* @param teamSlugs A collection of slugs that represent the teams to check for the username.
|
||||
* @returns a Promise that resolves to `true` if the username is a member of at least one of the specified teams.
|
||||
*/
|
||||
public async isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
|
||||
try {
|
||||
const teams = await this.fetchAll();
|
||||
const teamIds = teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id);
|
||||
return await this.isMemberById(username, teamIds);
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
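A brief usage sketch of the team-membership check (the username, team slugs and import paths below are placeholders, not values from the original sources):

// Illustrative only: check a username against a set of team slugs.
import {GithubApi} from './common/github-api';
import {GithubTeams} from './common/github-teams';

async function example(): Promise<void> {
  const api = new GithubApi(process.env.AIO_GITHUB_TOKEN || '');
  const teams = new GithubTeams(api, 'angular');
  const trusted = await teams.isMemberBySlug('some-user', ['team-a', 'team-b']);
  console.log(trusted ? 'Member of a trusted team.' : 'Not a member of any trusted team.');
}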
@ -1,23 +0,0 @@
|
||||
// We can't use `import...from` here, because of the following mess:
|
||||
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
|
||||
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
|
||||
//
|
||||
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
|
||||
// `jasmine-core` module and not the `jasmine` module).
|
||||
import Jasmine = require('jasmine');
|
||||
import 'source-map-support/register';
|
||||
|
||||
export const runTests = (specFiles: string[]) => {
|
||||
const config = {
|
||||
random: true,
|
||||
spec_files: specFiles,
|
||||
stopSpecOnExpectationFailure: true,
|
||||
};
|
||||
|
||||
process.on('unhandledRejection', (reason: any) => console.log('Unhandled rejection:', reason));
|
||||
|
||||
const runner = new Jasmine({});
|
||||
runner.loadConfig(config);
|
||||
runner.onComplete((passed: boolean) => process.exit(passed ? 0 : 1));
|
||||
runner.execute();
|
||||
};
|
@ -1,98 +0,0 @@
|
||||
import {basename, resolve as resolvePath} from 'path';
|
||||
import {SHORT_SHA_LEN} from './constants';
|
||||
|
||||
/**
|
||||
* Shorten a SHA to make it more readable
|
||||
* @param sha The SHA to shorten.
|
||||
*/
|
||||
export function computeShortSha(sha: string) {
|
||||
return sha.substr(0, SHORT_SHA_LEN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the path for a downloaded artifact file.
|
||||
* @param downloadsDir The directory where artifacts are downloaded
|
||||
* @param pr The PR associated with this artifact.
|
||||
* @param sha The SHA associated with the build for this artifact.
|
||||
* @param artifactPath The path to the artifact on CircleCI.
|
||||
* @returns The fully resolved location for the specified downloaded artifact.
|
||||
*/
|
||||
export function computeArtifactDownloadPath(downloadsDir: string, pr: number, sha: string, artifactPath: string) {
|
||||
return resolvePath(downloadsDir, `${pr}-${computeShortSha(sha)}-${basename(artifactPath)}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the PR number and latest commit SHA from a downloaded file path.
|
||||
* @param downloadPath the path to the downloaded file.
|
||||
* @returns An object whose keys are the PR and SHA extracted from the file path.
|
||||
*/
|
||||
export function getPrInfoFromDownloadPath(downloadPath: string) {
|
||||
const file = basename(downloadPath);
|
||||
const [pr, sha] = file.split('-');
|
||||
return {pr: +pr, sha};
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a value is true.
|
||||
* @param value The value to assert.
|
||||
* @param message The message if the value is not true.
|
||||
*/
|
||||
export function assert(value: boolean, message: string) {
|
||||
if (!value) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a parameter is not equal to "".
|
||||
* @param name The name of the parameter.
|
||||
* @param value The value of the parameter.
|
||||
*/
|
||||
export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
|
||||
assert(!!value, `Missing or empty required parameter '${name}'!`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Get an environment variable.
|
||||
* @param name The name of the environment variable.
|
||||
* @param isOptional True if the variable is optional.
|
||||
* @returns The value of the variable or "" if it is optional and falsy.
|
||||
* @throws `Error` if the variable is falsy and not optional.
|
||||
*/
|
||||
export const getEnvVar = (name: string, isOptional = false): string => {
|
||||
const value = process.env[name];
|
||||
|
||||
if (!isOptional && !value) {
|
||||
try {
|
||||
throw new Error(`ERROR: Missing required environment variable '${name}'!`);
|
||||
} catch (error) {
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
return value || '';
|
||||
};
|
||||
|
||||
/**
|
||||
* A basic logger implementation.
|
||||
* Delegates to `console`, but prepends each message with the current date and the specified scope (i.e. the caller).
|
||||
*/
|
||||
export class Logger {
|
||||
private padding = ' '.repeat(20 - this.scope.length);
|
||||
|
||||
/**
|
||||
* Create a new `Logger` instance for the specified `scope`.
|
||||
* @param scope The logger's scope (added to all messages).
|
||||
*/
|
||||
constructor(private scope: string) {}
|
||||
|
||||
public error(...args: any[]) { this.callMethod('error', args); }
|
||||
public info(...args: any[]) { this.callMethod('info', args); }
|
||||
public log(...args: any[]) { this.callMethod('log', args); }
|
||||
public warn(...args: any[]) { this.callMethod('warn', args); }
|
||||
|
||||
private callMethod(method: 'error' | 'info' | 'log' | 'warn', args: any[]) {
|
||||
console[method](`[${new Date()}]`, `${this.scope}:${this.padding}`, ...args);
|
||||
}
|
||||
}
|
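To make the download-path convention concrete, here is a short sketch (not part of the original file; directory, PR number and artifact path are placeholders) showing the round trip between `computeArtifactDownloadPath()` and `getPrInfoFromDownloadPath()`:

// Illustrative only: the download path format and how PR info is recovered from it.
import {computeArtifactDownloadPath, getPrInfoFromDownloadPath} from './common/utils';

const downloadPath = computeArtifactDownloadPath(
    '/tmp/aio-downloads', 12345, '1234567890'.repeat(4), 'aio/dist/aio-snapshot.tgz');
// => '/tmp/aio-downloads/12345-1234567-aio-snapshot.tgz' (the SHA is shortened to its first 7 characters)

const {pr, sha} = getPrInfoFromDownloadPath(downloadPath);
// => pr === 12345, sha === '1234567'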
@ -1,144 +0,0 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import {EventEmitter} from 'events';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
// Classes
|
||||
export class BuildCreator extends EventEmitter {
|
||||
|
||||
private logger = new Logger('BuildCreator');
|
||||
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string) {
|
||||
super();
|
||||
assertNotMissingOrEmpty('buildsDir', buildsDir);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public create(pr: number, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
|
||||
// Use only part of the SHA for more readable URLs.
|
||||
sha = computeShortSha(sha);
|
||||
|
||||
const {newPrDir: prDir} = this.getCandidatePrDirs(pr, isPublic);
|
||||
const shaDir = path.join(prDir, sha);
|
||||
let dirToRemoveOnError: string;
|
||||
|
||||
return Promise.resolve().
|
||||
// If the same PR exists with different visibility, update the visibility first.
|
||||
then(() => this.updatePrVisibility(pr, isPublic)).
|
||||
then(() => Promise.all([this.exists(prDir), this.exists(shaDir)])).
|
||||
then(([prDirExisted, shaDirExisted]) => {
|
||||
if (shaDirExisted) {
|
||||
const publicOrNot = isPublic ? 'public' : 'non-public';
|
||||
throw new PreviewServerError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
}
|
||||
|
||||
dirToRemoveOnError = prDirExisted ? shaDir : prDir;
|
||||
|
||||
return Promise.resolve().
|
||||
then(() => shell.mkdir('-p', shaDir)).
|
||||
then(() => this.extractArchive(archivePath, shaDir)).
|
||||
then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha, isPublic))).
|
||||
then(() => undefined);
|
||||
}).
|
||||
catch(err => {
|
||||
if (dirToRemoveOnError) {
|
||||
shell.rm('-rf', dirToRemoveOnError);
|
||||
}
|
||||
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while creating preview at: ${shaDir}\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
public updatePrVisibility(pr: number, makePublic: boolean): Promise<boolean> {
|
||||
const {oldPrDir: otherVisPrDir, newPrDir: targetVisPrDir} = this.getCandidatePrDirs(pr, makePublic);
|
||||
|
||||
return Promise.
|
||||
all([this.exists(otherVisPrDir), this.exists(targetVisPrDir)]).
|
||||
then(([otherVisPrDirExisted, targetVisPrDirExisted]) => {
|
||||
if (!otherVisPrDirExisted) {
|
||||
// No visibility change: Either the visibility is up-to-date or the PR does not exist.
|
||||
return false;
|
||||
} else if (targetVisPrDirExisted) {
|
||||
// Error: Directories for both visibilities exist.
|
||||
throw new PreviewServerError(409,
|
||||
`Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
|
||||
}
|
||||
|
||||
// Visibility change: Moving `otherVisPrDir` to `targetVisPrDir`.
|
||||
return Promise.resolve().
|
||||
then(() => shell.mv(otherVisPrDir, targetVisPrDir)).
|
||||
then(() => this.listShasByDate(targetVisPrDir)).
|
||||
then(shas => this.emit(ChangedPrVisibilityEvent.type, new ChangedPrVisibilityEvent(+pr, shas, makePublic))).
|
||||
then(() => true);
|
||||
}).
|
||||
catch(err => {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected exists(fileOrDir: string): Promise<boolean> {
|
||||
return new Promise(resolve => fs.access(fileOrDir, err => resolve(!err)));
|
||||
}
|
||||
|
||||
protected extractArchive(inputFile: string, outputDir: string): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const cmd = `tar --extract --gzip --directory "${outputDir}" --file "${inputFile}"`;
|
||||
|
||||
cp.exec(cmd, (err, _stdout, stderr) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
if (stderr) {
|
||||
this.logger.warn(stderr);
|
||||
}
|
||||
|
||||
try {
|
||||
shell.chmod('-R', 'a-w', outputDir);
|
||||
shell.rm('-f', inputFile);
|
||||
resolve();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
protected getCandidatePrDirs(pr: number, isPublic: boolean): {oldPrDir: string, newPrDir: string} {
|
||||
const hiddenPrDir = path.join(this.buildsDir, HIDDEN_DIR_PREFIX + pr);
|
||||
const publicPrDir = path.join(this.buildsDir, `${pr}`);
|
||||
|
||||
const oldPrDir = isPublic ? hiddenPrDir : publicPrDir;
|
||||
const newPrDir = isPublic ? publicPrDir : hiddenPrDir;
|
||||
|
||||
return {oldPrDir, newPrDir};
|
||||
}
|
||||
|
||||
protected listShasByDate(inputDir: string): Promise<string[]> {
|
||||
return Promise.resolve().
|
||||
then(() => shell.ls('-l', inputDir) as any as Promise<(fs.Stats & {name: string})[]>).
|
||||
// Keep directories only.
|
||||
// (Also, convert to standard Array - ShellJS provides custom `sort()` method for sorting file contents.)
|
||||
then(items => items.filter(item => item.isDirectory())).
|
||||
// Sort by modification date.
|
||||
then(items => items.sort((a, b) => a.mtime.getTime() - b.mtime.getTime())).
|
||||
// Return directory names.
|
||||
then(items => items.map(item => item.name));
|
||||
}
|
||||
}
|
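A hedged sketch of how `BuildCreator` could be exercised on its own (the builds directory, archive path, PR number and SHA are placeholders; the preview-server factory further below wires the same events in production):

// Illustrative only: create a preview from a downloaded archive and react to the emitted event.
import {BuildCreator} from './preview-server/build-creator';
import {CreatedBuildEvent} from './preview-server/build-events';

async function example(): Promise<void> {
  const creator = new BuildCreator('/var/www/aio-builds');
  creator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) =>
      console.log(`Created ${isPublic ? 'public' : 'hidden'} preview for PR ${pr} at ${sha}.`));
  await creator.create(12345, '1234567890'.repeat(4), '/tmp/aio-downloads/12345-1234567-aio-snapshot.tgz', true);
}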
@ -1,16 +0,0 @@
|
||||
// Classes
|
||||
export class ChangedPrVisibilityEvent {
|
||||
// Properties - Public, Static
|
||||
public static type = 'pr.changedVisibility';
|
||||
|
||||
// Constructor
|
||||
constructor(public pr: number, public shas: string[], public isPublic: boolean) {}
|
||||
}
|
||||
|
||||
export class CreatedBuildEvent {
|
||||
// Properties - Public, Static
|
||||
public static type = 'build.created';
|
||||
|
||||
// Constructor
|
||||
constructor(public pr: number, public sha: string, public isPublic: boolean) {}
|
||||
}
|
@ -1,83 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import fetch from 'node-fetch';
|
||||
import {dirname} from 'path';
|
||||
import {mkdir} from 'shelljs';
|
||||
import {promisify} from 'util';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {assert, assertNotMissingOrEmpty, computeArtifactDownloadPath, Logger} from '../common/utils';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
export interface GithubInfo {
|
||||
org: string;
|
||||
pr: number;
|
||||
repo: string;
|
||||
sha: string;
|
||||
success: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper that can get information about builds and download build artifacts.
|
||||
*/
|
||||
export class BuildRetriever {
|
||||
private logger = new Logger('BuildRetriever');
|
||||
constructor(private api: CircleCiApi, private downloadSizeLimit: number, private downloadDir: string) {
|
||||
assert(downloadSizeLimit > 0, 'Invalid parameter "downloadSizeLimit" should be a number greater than 0.');
|
||||
assertNotMissingOrEmpty('downloadDir', downloadDir);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get GitHub information about a build
|
||||
* @param buildNum The number of the build for which to retrieve the info.
|
||||
* @returns The Github org, repo, PR and latest SHA for the specified build.
|
||||
*/
|
||||
public async getGithubInfo(buildNum: number): Promise<GithubInfo> {
|
||||
const buildInfo = await this.api.getBuildInfo(buildNum);
|
||||
const githubInfo: GithubInfo = {
|
||||
org: buildInfo.username,
|
||||
pr: getPrFromBranch(buildInfo.branch),
|
||||
repo: buildInfo.reponame,
|
||||
sha: buildInfo.vcs_revision,
|
||||
success: !buildInfo.failed,
|
||||
};
|
||||
return githubInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a request to the given URL for a build artifact and store it locally.
|
||||
* @param buildNum the number of the CircleCI build whose artifact we want to download.
|
||||
* @param pr the number of the PR that triggered the CircleCI build.
|
||||
* @param sha the commit in the PR that triggered the CircleCI build.
|
||||
* @param artifactPath the path on CircleCI where the artifact was stored.
|
||||
* @returns A promise to the file path where the downloaded file was stored.
|
||||
*/
|
||||
public async downloadBuildArtifact(buildNum: number, pr: number, sha: string, artifactPath: string): Promise<string> {
|
||||
try {
|
||||
const outPath = computeArtifactDownloadPath(this.downloadDir, pr, sha, artifactPath);
|
||||
const downloadExists = await new Promise(resolve => fs.exists(outPath, exists => resolve(exists)));
|
||||
if (!downloadExists) {
|
||||
const url = await this.api.getBuildArtifactUrl(buildNum, artifactPath);
|
||||
const response = await fetch(url, {size: this.downloadSizeLimit});
|
||||
if (response.status !== 200) {
|
||||
throw new PreviewServerError(response.status, `Error ${response.status} - ${response.statusText}`);
|
||||
}
|
||||
const buffer = await response.buffer();
|
||||
mkdir('-p', dirname(outPath));
|
||||
await promisify(fs.writeFile)(outPath, buffer);
|
||||
}
|
||||
return outPath;
|
||||
} catch (error) {
|
||||
this.logger.warn(error);
|
||||
const status = (error.type === 'max-size') ? 413 : 500;
|
||||
throw new PreviewServerError(status, `CircleCI artifact download failed (${error.message || error})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getPrFromBranch(branch: string): number {
|
||||
// CircleCI only exposes PR numbers via the `branch` field :-(
|
||||
const match = /^pull\/(\d+)$/.exec(branch);
|
||||
if (!match) {
|
||||
throw new Error(`No PR found in branch field: ${branch}`);
|
||||
}
|
||||
return +match[1];
|
||||
}
|
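As a usage sketch for this retriever (size limit, directories, build number and artifact path are illustrative assumptions, not values from the original sources):

// Illustrative only: retrieve build info and download the artifact for a hypothetical CircleCI build.
import {CircleCiApi} from './common/circle-ci-api';
import {BuildRetriever} from './preview-server/build-retriever';

async function example(): Promise<void> {
  const api = new CircleCiApi('angular', 'angular', process.env.AIO_CIRCLE_CI_TOKEN || '');
  const retriever = new BuildRetriever(api, 100 * 1024 * 1024, '/tmp/aio-downloads');
  const {pr, sha, success} = await retriever.getGithubInfo(67890);  // hypothetical build number
  if (success) {
    const archive = await retriever.downloadBuildArtifact(67890, pr, sha, 'aio/dist/aio-snapshot.tgz');
    console.log(`Artifact stored at ${archive}.`);
  }
}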
@ -1,46 +0,0 @@
|
||||
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
|
||||
/**
|
||||
* A helper to verify whether builds are trusted.
|
||||
*/
|
||||
export class BuildVerifier {
|
||||
/**
|
||||
* Construct a new BuildVerifier instance.
|
||||
* @param prs A helper to access PR information.
|
||||
* @param teams A helper to access Github team information.
|
||||
* @param allowedTeamSlugs The teams that are trusted.
|
||||
* @param trustedPrLabel The github label that indicates that a PR is trusted.
|
||||
*/
|
||||
constructor(protected prs: GithubPullRequests, protected teams: GithubTeams,
|
||||
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
|
||||
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
|
||||
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR contains files that are significant to the build.
|
||||
* @param pr The number of the PR to check
|
||||
* @param significantFilePattern A regex that selects files that are significant.
|
||||
*/
|
||||
public async getSignificantFilesChanged(pr: number, significantFilePattern: RegExp): Promise<boolean> {
|
||||
const files = await this.prs.fetchFiles(pr);
|
||||
return files.some(file => significantFilePattern.test(file.filename));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR is trusted.
|
||||
* @param pr The number of the PR to check.
|
||||
* @returns true if the PR is trusted.
|
||||
*/
|
||||
public async getPrIsTrusted(pr: number): Promise<boolean> {
|
||||
const prInfo = await this.prs.fetch(pr);
|
||||
return this.hasLabel(prInfo, this.trustedPrLabel) ||
|
||||
(await this.teams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
|
||||
}
|
||||
|
||||
protected hasLabel(prInfo: PullRequest, label: string): boolean {
|
||||
return prInfo.labels.some(labelObj => labelObj.name === label);
|
||||
}
|
||||
}
|
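A minimal sketch of the two verification checks (team slugs, trusted label, PR number and the significant-files pattern below are placeholders):

// Illustrative only: decide whether a hypothetical PR may get a public preview.
import {GithubApi} from './common/github-api';
import {GithubPullRequests} from './common/github-pull-requests';
import {GithubTeams} from './common/github-teams';
import {BuildVerifier} from './preview-server/build-verifier';

async function example(): Promise<void> {
  const api = new GithubApi(process.env.AIO_GITHUB_TOKEN || '');
  const prs = new GithubPullRequests(api, 'angular', 'angular');
  const teams = new GithubTeams(api, 'angular');
  const verifier = new BuildVerifier(prs, teams, ['team-a', 'team-b'], 'some-trusted-label');
  const significant = await verifier.getSignificantFilesChanged(12345, /^(?:aio|packages)\//);  // placeholder pattern
  const trusted = await verifier.getPrIsTrusted(12345);
  console.log({significant, trusted});
}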
@ -1,41 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {
|
||||
AIO_ARTIFACT_MAX_SIZE,
|
||||
AIO_ARTIFACT_PATH,
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_CIRCLE_CI_TOKEN,
|
||||
AIO_DOMAIN_NAME,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TEAM_SLUGS,
|
||||
AIO_GITHUB_TOKEN,
|
||||
AIO_PREVIEW_SERVER_HOSTNAME,
|
||||
AIO_PREVIEW_SERVER_PORT,
|
||||
AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
AIO_TRUSTED_PR_LABEL,
|
||||
} from '../common/env-variables';
|
||||
import {PreviewServerFactory} from './preview-server-factory';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main(): void {
|
||||
PreviewServerFactory
|
||||
.create({
|
||||
buildArtifactPath: AIO_ARTIFACT_PATH,
|
||||
buildsDir: AIO_BUILDS_DIR,
|
||||
circleCiToken: AIO_CIRCLE_CI_TOKEN,
|
||||
domainName: AIO_DOMAIN_NAME,
|
||||
downloadSizeLimit: AIO_ARTIFACT_MAX_SIZE,
|
||||
downloadsDir: AIO_DOWNLOADS_DIR,
|
||||
githubOrg: AIO_GITHUB_ORGANIZATION,
|
||||
githubRepo: AIO_GITHUB_REPO,
|
||||
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
|
||||
githubToken: AIO_GITHUB_TOKEN,
|
||||
significantFilesPattern: AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
|
||||
})
|
||||
.listen(AIO_PREVIEW_SERVER_PORT, AIO_PREVIEW_SERVER_HOSTNAME);
|
||||
}
|
@ -1,8 +0,0 @@
// Classes
export class PreviewServerError extends Error {
// Constructor
constructor(public status: number = 500, message?: string) {
super(message);
Object.setPrototypeOf(this, PreviewServerError.prototype);
}
}
@ -1,213 +0,0 @@
|
||||
// Imports
|
||||
import * as bodyParser from 'body-parser';
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {AddressInfo} from 'net';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {GithubApi} from '../common/github-api';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assert, assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {BuildCreator} from './build-creator';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {BuildRetriever} from './build-retriever';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
import {respondWithError, throwRequestError} from './utils';
|
||||
|
||||
const AIO_PREVIEW_JOB = 'aio_preview';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface PreviewServerConfig {
|
||||
downloadsDir: string;
|
||||
downloadSizeLimit: number;
|
||||
buildArtifactPath: string;
|
||||
buildsDir: string;
|
||||
domainName: string;
|
||||
githubOrg: string;
|
||||
githubRepo: string;
|
||||
githubTeamSlugs: string[];
|
||||
circleCiToken: string;
|
||||
githubToken: string;
|
||||
significantFilesPattern: string;
|
||||
trustedPrLabel: string;
|
||||
}
|
||||
|
||||
const logger = new Logger('PreviewServer');
|
||||
|
||||
// Classes
|
||||
export class PreviewServerFactory {
|
||||
// Methods - Public
|
||||
public static create(cfg: PreviewServerConfig): http.Server {
|
||||
assertNotMissingOrEmpty('domainName', cfg.domainName);
|
||||
|
||||
const circleCiApi = new CircleCiApi(cfg.githubOrg, cfg.githubRepo, cfg.circleCiToken);
|
||||
const githubApi = new GithubApi(cfg.githubToken);
|
||||
const prs = new GithubPullRequests(githubApi, cfg.githubOrg, cfg.githubRepo);
|
||||
const teams = new GithubTeams(githubApi, cfg.githubOrg);
|
||||
|
||||
const buildRetriever = new BuildRetriever(circleCiApi, cfg.downloadSizeLimit, cfg.downloadsDir);
|
||||
const buildVerifier = new BuildVerifier(prs, teams, cfg.githubTeamSlugs, cfg.trustedPrLabel);
|
||||
const buildCreator = PreviewServerFactory.createBuildCreator(prs, cfg.buildsDir, cfg.domainName);
|
||||
|
||||
const middleware = PreviewServerFactory.createMiddleware(buildRetriever, buildVerifier, buildCreator, cfg);
|
||||
const httpServer = http.createServer(middleware as any);
|
||||
|
||||
httpServer.on('listening', () => {
|
||||
const info = httpServer.address() as AddressInfo;
|
||||
logger.info(`Up and running (and listening on ${info.address}:${info.port})...`);
|
||||
});
|
||||
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
public static createMiddleware(buildRetriever: BuildRetriever, buildVerifier: BuildVerifier,
|
||||
buildCreator: BuildCreator, cfg: PreviewServerConfig): express.Express {
|
||||
const middleware = express();
|
||||
const jsonParser = bodyParser.json();
|
||||
const significantFilesRe = new RegExp(cfg.significantFilesPattern);
|
||||
|
||||
// RESPOND TO IS-ALIVE PING
|
||||
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
|
||||
|
||||
// RESPOND TO CAN-HAVE-PUBLIC-PREVIEW CHECK
|
||||
const canHavePublicPreviewRe = /^\/can-have-public-preview\/(\d+)\/?$/;
|
||||
middleware.get(canHavePublicPreviewRe, async (req, res) => {
|
||||
try {
|
||||
const pr = +canHavePublicPreviewRe.exec(req.url)![1];
|
||||
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
// Cannot have preview: PR did not touch relevant files: `aio/` or `packages/` (except for spec files).
|
||||
res.send({canHavePublicPreview: false, reason: 'No significant files touched.'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because it did not touch any significant files.`);
|
||||
} else if (!await buildVerifier.getPrIsTrusted(pr)) {
|
||||
// Cannot have preview: PR not automatically verifiable as "trusted".
|
||||
res.send({canHavePublicPreview: false, reason: 'Not automatically verifiable as "trusted".'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because not automatically verifiable as "trusted".`);
|
||||
} else {
|
||||
// Can have preview.
|
||||
res.send({canHavePublicPreview: true, reason: null});
|
||||
logger.log(`PR:${pr} - Can have a public preview.`);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Previewability check error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// CIRCLE_CI BUILD COMPLETE WEBHOOK
|
||||
middleware.post(/^\/circle-build\/?$/, jsonParser, async (req, res) => {
|
||||
try {
|
||||
if (!(
|
||||
req.is('json') &&
|
||||
req.body &&
|
||||
req.body.payload &&
|
||||
req.body.payload.build_num > 0 &&
|
||||
req.body.payload.build_parameters &&
|
||||
req.body.payload.build_parameters.CIRCLE_JOB
|
||||
)) {
|
||||
throwRequestError(400, `Incorrect body content. Expected JSON`, req);
|
||||
}
|
||||
|
||||
const job = req.body.payload.build_parameters.CIRCLE_JOB;
|
||||
const buildNum = req.body.payload.build_num;
|
||||
|
||||
logger.log(`Build:${buildNum}, Job:${job} - processing web-hook trigger`);
|
||||
|
||||
if (job !== AIO_PREVIEW_JOB) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`Build:${buildNum}, Job:${job} -`,
|
||||
`Skipping preview processing because this is not the "${AIO_PREVIEW_JOB}" job.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const { pr, sha, org, repo, success } = await buildRetriever.getGithubInfo(buildNum);
|
||||
|
||||
if (!success) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - Skipping preview processing because this build did not succeed.`);
|
||||
return;
|
||||
}
|
||||
|
||||
assert(cfg.githubOrg === org,
|
||||
`Invalid webhook: expected "githubOrg" property to equal "${cfg.githubOrg}" but got "${org}".`);
|
||||
assert(cfg.githubRepo === repo,
|
||||
`Invalid webhook: expected "githubRepo" property to equal "${cfg.githubRepo}" but got "${repo}".`);
|
||||
|
||||
// Do not deploy unless this PR has touched relevant files: `aio/` or `packages/` (except for spec files)
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - ` +
|
||||
`Skipping preview processing because this PR did not touch any significant files.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const artifactPath = await buildRetriever.downloadBuildArtifact(buildNum, pr, sha, cfg.buildArtifactPath);
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(pr);
|
||||
await buildCreator.create(pr, sha, artifactPath, isPublic);
|
||||
|
||||
res.sendStatus(isPublic ? 201 : 202);
|
||||
logger.log(`PR:${pr}, SHA:${computeShortSha(sha)}, Build:${buildNum} - ` +
|
||||
`Successfully created ${isPublic ? 'public' : 'non-public'} preview.`);
|
||||
} catch (err) {
|
||||
logger.error('CircleCI webhook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// GITHUB PR UPDATED WEBHOOK
|
||||
middleware.post(/^\/pr-updated\/?$/, jsonParser, async (req, res) => {
|
||||
const { action, number: prNo }: { action?: string, number?: number } = req.body;
|
||||
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
|
||||
|
||||
try {
|
||||
if (!visMayHaveChanged) {
|
||||
res.sendStatus(200);
|
||||
} else if (!prNo) {
|
||||
throwRequestError(400, `Missing or empty 'number' field`, req);
|
||||
} else {
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(prNo);
|
||||
await buildCreator.updatePrVisibility(prNo, isPublic);
|
||||
res.sendStatus(200);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('PR update hook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// ALL OTHER REQUESTS
|
||||
middleware.all('*', req => throwRequestError(404, 'Unknown resource', req));
|
||||
middleware.use((err: any, _req: any, res: express.Response, _next: any) => {
|
||||
const statusText = http.STATUS_CODES[err.status] || '???';
|
||||
logger.error(`Preview server error: ${err.status} - ${statusText}:`, err.message);
|
||||
respondWithError(res, err);
|
||||
});
|
||||
|
||||
return middleware;
|
||||
}
|
||||
|
||||
public static createBuildCreator(prs: GithubPullRequests, buildsDir: string, domainName: string): BuildCreator {
|
||||
const buildCreator = new BuildCreator(buildsDir);
|
||||
const postPreviewsComment = (pr: number, shas: string[]) => {
|
||||
const body = shas.
|
||||
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
|
||||
join('\n');
|
||||
|
||||
return prs.addComment(pr, body);
|
||||
};
|
||||
|
||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
|
||||
if (isPublic) {
|
||||
postPreviewsComment(pr, [sha]);
|
||||
}
|
||||
});
|
||||
|
||||
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
|
||||
if (isPublic && shas.length) {
|
||||
postPreviewsComment(pr, shas);
|
||||
}
|
||||
});
|
||||
|
||||
return buildCreator;
|
||||
}
|
||||
}
|
@ -1,29 +0,0 @@
|
||||
import * as express from 'express';
|
||||
import {promisify} from 'util';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
/**
|
||||
* Update the response to report that an error has occurred.
|
||||
* @param res The response to configure as an error.
|
||||
* @param err The error that needs to be reported.
|
||||
*/
|
||||
export async function respondWithError(res: express.Response, err: any): Promise<void> {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, String((err && err.message) || err));
|
||||
}
|
||||
|
||||
res.status(err.status);
|
||||
await promisify(res.end.bind(res))(err.message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Throw an exception that describes the given error information.
|
||||
* @param status The HTTP status code include in the error.
|
||||
* @param error The error message to include in the error.
|
||||
* @param req The request that triggered this error.
|
||||
*/
|
||||
export function throwRequestError(status: number, error: string, req: express.Request): never {
|
||||
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
|
||||
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
|
||||
throw new PreviewServerError(status, message);
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
export const enum BuildNums {
|
||||
BUILD_INFO_ERROR = 1,
|
||||
BUILD_INFO_404,
|
||||
BUILD_INFO_BUILD_FAILED,
|
||||
BUILD_INFO_INVALID_GH_ORG,
|
||||
BUILD_INFO_INVALID_GH_REPO,
|
||||
CHANGED_FILES_ERROR,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
BUILD_ARTIFACTS_ERROR,
|
||||
BUILD_ARTIFACTS_404,
|
||||
BUILD_ARTIFACTS_EMPTY,
|
||||
BUILD_ARTIFACTS_MISSING,
|
||||
DOWNLOAD_ARTIFACT_ERROR,
|
||||
DOWNLOAD_ARTIFACT_404,
|
||||
DOWNLOAD_ARTIFACT_TOO_BIG,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
|
||||
export const enum PrNums {
|
||||
CHANGED_FILES_ERROR = 1,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
|
||||
export const SHA = '1234567890'.repeat(4);
|
||||
export const ALT_SHA = 'abcde'.repeat(8);
|
||||
export const SIMILAR_SHA = SHA.slice(0, -1) + 'A';
|
@ -1,10 +0,0 @@
|
||||
declare module 'delete-empty' {
|
||||
interface Options {
|
||||
dryRun: boolean;
|
||||
verbose: boolean;
|
||||
filter: (filePath: string) => boolean;
|
||||
}
|
||||
export default function deleteEmpty(cwd: string, options?: Options): Promise<string[]>;
|
||||
export default function deleteEmpty(cwd: string, options?: Options, callback?: (err: any, deleted: string[]) => void): void;
|
||||
export function sync(cwd: string, options?: Options): string[];
|
||||
}
|
@ -1,237 +0,0 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {AIO_DOWNLOADS_DIR, HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_NGINX_PORT_HTTP,
|
||||
AIO_NGINX_PORT_HTTPS,
|
||||
AIO_WWW_USER,
|
||||
} from '../common/env-variables';
|
||||
import {computeShortSha, Logger} from '../common/utils';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface CmdResult { success: boolean; err: Error | null; stdout: string; stderr: string; }
|
||||
export interface FileSpecs { content?: string; size?: number; }
|
||||
|
||||
export type CleanUpFn = () => void;
|
||||
export type TestSuiteFactory = (scheme: string, port: number) => void;
|
||||
export type VerifyCmdResultFn = (result: CmdResult) => void;
|
||||
|
||||
// Classes
|
||||
class Helper {
|
||||
|
||||
// Properties - Protected
|
||||
protected cleanUpFns: CleanUpFn[] = [];
|
||||
protected portPerScheme: {[scheme: string]: number} = {
|
||||
http: AIO_NGINX_PORT_HTTP,
|
||||
https: AIO_NGINX_PORT_HTTPS,
|
||||
};
|
||||
|
||||
private logger = new Logger('TestHelper');
|
||||
|
||||
// Constructor
|
||||
constructor() {
|
||||
shell.mkdir('-p', AIO_BUILDS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_BUILDS_DIR}`);
|
||||
shell.mkdir('-p', AIO_DOWNLOADS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_DOWNLOADS_DIR}`);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public cleanUp(): void {
|
||||
while (this.cleanUpFns.length) {
|
||||
// Clean-up fns remove themselves from the list.
|
||||
this.cleanUpFns[0]();
|
||||
}
|
||||
|
||||
const leftoverDownloads = fs.readdirSync(AIO_DOWNLOADS_DIR);
|
||||
const leftoverBuilds = fs.readdirSync(AIO_BUILDS_DIR);
|
||||
|
||||
if (leftoverDownloads.length) {
|
||||
this.logger.log(`Downloads directory '${AIO_DOWNLOADS_DIR}' is not empty after clean-up.`, leftoverDownloads);
|
||||
shell.rm('-rf', `${AIO_DOWNLOADS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length) {
|
||||
this.logger.log(`Builds directory '${AIO_BUILDS_DIR}' is not empty after clean-up.`, leftoverBuilds);
|
||||
shell.rm('-rf', `${AIO_BUILDS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length || leftoverDownloads.length) {
|
||||
throw new Error(`Unexpected test files not cleaned up.`);
|
||||
}
|
||||
}
|
||||
|
||||
public createDummyBuild(pr: number, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
|
||||
const prDir = this.getPrDir(pr, isPublic);
|
||||
const shaDir = this.getShaDir(prDir, sha, legacy);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
||||
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${prDir}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
||||
}
|
||||
|
||||
public getPrDir(pr: number, isPublic: boolean): string {
|
||||
const prDirName = isPublic ? '' + pr : HIDDEN_DIR_PREFIX + pr;
|
||||
return path.join(AIO_BUILDS_DIR, prDirName);
|
||||
}
|
||||
|
||||
public getShaDir(prDir: string, sha: string, legacy = false): string {
|
||||
return path.join(prDir, legacy ? sha : computeShortSha(sha));
|
||||
}
|
||||
|
||||
public readBuildFile(pr: number, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
return fs.readFileSync(absFilePath, 'utf8');
|
||||
}
|
||||
|
||||
public runCmd(cmd: string, opts: cp.ExecFileOptions = {}): Promise<CmdResult> {
|
||||
return new Promise(resolve => {
|
||||
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
|
||||
this.createCleanUpFn(() => proc.kill());
|
||||
});
|
||||
}
|
||||
|
||||
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory): void {
|
||||
Object.keys(this.portPerScheme).forEach(scheme => suiteFactory(scheme, this.portPerScheme[scheme]));
|
||||
}
|
||||
|
||||
public verifyResponse(status: number, regex: string | RegExp = /^/): VerifyCmdResultFn {
|
||||
return (result: CmdResult) => {
|
||||
const [headers, body] = result.stdout.
|
||||
split(/(?:\r?\n){2,}/).
|
||||
map(s => s.trim()).
|
||||
slice(-2); // In case of redirect, discard the previous headers.
|
||||
// Only keep the last two sections (final headers and body).
|
||||
|
||||
if (!result.success) {
|
||||
this.logger.log('Stdout:', result.stdout);
|
||||
this.logger.error('Stderr:', result.stderr);
|
||||
this.logger.error('Error:', result.err);
|
||||
}
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(headers).toMatch(new RegExp(`HTTP/(?:1\\.1|2) ${status} `));
|
||||
expect(body).toMatch(regex);
|
||||
};
|
||||
}
|
||||
|
||||
public writeBuildFile(pr: number, sha: string, relFilePath: string, content: string, isPublic = true,
|
||||
legacy = false): void {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
this.writeFile(absFilePath, {content}, true);
|
||||
}
|
||||
|
||||
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): void {
|
||||
if (!force && fs.existsSync(filePath)) {
|
||||
throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
|
||||
}
|
||||
|
||||
let cleanUpTarget = filePath;
|
||||
while (!fs.existsSync(path.dirname(cleanUpTarget))) {
|
||||
cleanUpTarget = path.dirname(cleanUpTarget);
|
||||
}
|
||||
|
||||
shell.mkdir('-p', path.dirname(filePath));
|
||||
if (size) {
|
||||
// Create a file of the specified size.
|
||||
cp.execSync(`fallocate -l ${size} ${filePath}`);
|
||||
} else {
|
||||
// Create a file with the specified content.
|
||||
fs.writeFileSync(filePath, content || '');
|
||||
}
|
||||
shell.exec(`chown ${AIO_WWW_USER} ${filePath}`);
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected createCleanUpFn(fn: () => void): CleanUpFn {
|
||||
const cleanUpFn = () => {
|
||||
const idx = this.cleanUpFns.indexOf(cleanUpFn);
|
||||
if (idx !== -1) {
|
||||
this.cleanUpFns.splice(idx, 1);
|
||||
fn();
|
||||
}
|
||||
};
|
||||
|
||||
this.cleanUpFns.push(cleanUpFn);
|
||||
|
||||
return cleanUpFn;
|
||||
}
|
||||
}
|
||||
|
||||
interface DefaultCurlOptions {
|
||||
defaultMethod?: CurlOptions['method'];
|
||||
defaultOptions?: CurlOptions['options'];
|
||||
defaultHeaders?: CurlOptions['headers'];
|
||||
defaultData?: CurlOptions['data'];
|
||||
defaultExtraPath?: CurlOptions['extraPath'];
|
||||
}
|
||||
|
||||
interface CurlOptions {
|
||||
method?: string;
|
||||
options?: string;
|
||||
headers?: string[];
|
||||
data?: any;
|
||||
url?: string;
|
||||
extraPath?: string;
|
||||
}
|
||||
|
||||
export function makeCurl(baseUrl: string, {
|
||||
defaultMethod = 'POST',
|
||||
defaultOptions = '',
|
||||
defaultHeaders = ['Content-Type: application/json'],
|
||||
defaultData = {},
|
||||
defaultExtraPath = '',
|
||||
}: DefaultCurlOptions = {}) {
|
||||
return function curl({
|
||||
method = defaultMethod,
|
||||
options = defaultOptions,
|
||||
headers = defaultHeaders,
|
||||
data = defaultData,
|
||||
url = baseUrl,
|
||||
extraPath = defaultExtraPath,
|
||||
}: CurlOptions) {
|
||||
const dataString = data ? JSON.stringify(data) : '';
|
||||
const cmd = `curl -iLX ${method} ` +
|
||||
`${options} ` +
|
||||
headers.map(header => `--header "${header}" `).join('') +
|
||||
`--data '${dataString}' ` +
|
||||
`${url}${extraPath}`;
|
||||
return helper.runCmd(cmd);
|
||||
};
|
||||
}
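A usage sketch of the factory above: defaults are fixed once per endpoint, and each call overrides only what it needs, mirroring the preview-server specs further down (host and port are hypothetical):

const curl = makeCurl('http://localhost:80/can-have-public-preview', {
  defaultData: null,
  defaultHeaders: [],
  defaultMethod: 'GET',
});
await curl({extraPath: '/42'}).then(helper.verifyResponse(200));
await curl({method: 'POST'}).then(helper.verifyResponse(404));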
|
||||
|
||||
export interface PayloadData {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: number,
|
||||
build_parameters: {
|
||||
CIRCLE_JOB: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export function payload(buildNum: number): PayloadData {
|
||||
return {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: buildNum,
|
||||
build_parameters: { CIRCLE_JOB: 'aio_preview' },
|
||||
},
|
||||
},
|
||||
};
|
||||
}
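For reference, payload(42) produces the body that the curl helper above serializes with --data, i.e. the shape the /circle-build webhook specs below rely on:

const body = payload(42);
// JSON.stringify(body.data) ===
//   '{"payload":{"build_num":42,"build_parameters":{"CIRCLE_JOB":"aio_preview"}}}'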
|
||||
|
||||
|
||||
// Exports
|
||||
export const helper = new Helper();
|
@ -1,6 +0,0 @@
|
||||
// Imports
|
||||
import {runTests} from '../common/run-tests';
|
||||
|
||||
// Run
|
||||
const specFiles = [`${__dirname}/**/*.e2e.js`];
|
||||
runTests(specFiles);
|
@ -1,7 +0,0 @@
|
||||
declare module jasmine {
|
||||
interface Matchers {
|
||||
toExistAsAFile(remove?: boolean): boolean;
|
||||
toExistAsABuild(remove?: boolean): boolean;
|
||||
toExistAsAnArtifact(remove?: boolean): boolean;
|
||||
}
|
||||
}
|
@ -1,88 +0,0 @@
|
||||
import {sync as deleteEmpty} from 'delete-empty';
|
||||
import {existsSync, unlinkSync} from 'fs';
|
||||
import {join} from 'path';
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {SHA} from './constants';
|
||||
import {helper} from './helper';
|
||||
|
||||
function checkFile(filePath: string, remove: boolean): boolean {
|
||||
const exists = existsSync(filePath);
|
||||
if (exists && remove) {
|
||||
// If the file exists and removal was requested, delete it to prevent left-over files from affecting later specs.
|
||||
unlinkSync(filePath);
|
||||
}
|
||||
return exists;
|
||||
}
|
||||
|
||||
function getArtifactPath(prNum: number, sha: string = SHA): string {
|
||||
return `${AIO_DOWNLOADS_DIR}/${prNum}-${computeShortSha(sha)}-aio-snapshot.tgz`;
|
||||
}
|
||||
|
||||
function checkFiles(prNum: number, isPublic: boolean, sha: string, isLegacy: boolean, remove: boolean) {
|
||||
const files = ['/index.html', '/foo/bar.js'];
|
||||
const prPath = helper.getPrDir(prNum, isPublic);
|
||||
const shaPath = helper.getShaDir(prPath, sha, isLegacy);
|
||||
|
||||
const existingFiles: string[] = [];
|
||||
const missingFiles: string[] = [];
|
||||
files
|
||||
.map(file => join(shaPath, file))
|
||||
.forEach(file => (checkFile(file, remove) ? existingFiles : missingFiles).push(file));
|
||||
|
||||
deleteEmpty(prPath);
|
||||
|
||||
return { existingFiles, missingFiles };
|
||||
}
|
||||
|
||||
class ToExistAsAFile implements jasmine.CustomMatcher {
|
||||
public compare(actual: string, remove = true): jasmine.CustomMatcherResult {
|
||||
const pass = checkFile(actual, remove);
|
||||
return {
|
||||
message: `Expected file at "${actual}" ${pass ? 'not' : ''} to exist`,
|
||||
pass,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ToExistAsAnArtifact implements jasmine.CustomMatcher {
|
||||
public compare(actual: {prNum: number, sha?: string}, remove = true): jasmine.CustomMatcherResult {
|
||||
const { prNum, sha = SHA } = actual;
|
||||
const filePath = getArtifactPath(prNum, sha);
|
||||
const pass = checkFile(filePath, remove);
|
||||
return {
|
||||
message: `Expected artifact "PR:${prNum}, SHA:${sha}, FILE:${filePath}" ${pass ? 'not' : '\b'} to exist`,
|
||||
pass,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ToExistAsABuild implements jasmine.CustomMatcher {
|
||||
public compare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}, remove = true):
|
||||
jasmine.CustomMatcherResult {
|
||||
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
|
||||
const {missingFiles} = checkFiles(prNum, isPublic, sha, isLegacy, remove);
|
||||
return {
|
||||
message: `Expected files for build "PR:${prNum}, SHA:${sha}" to exist:\n` +
|
||||
missingFiles.map(file => ` - ${file}`).join('\n'),
|
||||
pass: missingFiles.length === 0,
|
||||
};
|
||||
}
|
||||
public negativeCompare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}):
|
||||
jasmine.CustomMatcherResult {
|
||||
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
|
||||
const { existingFiles } = checkFiles(prNum, isPublic, sha, isLegacy, false);
|
||||
return {
|
||||
message: `Expected files for build "PR:${prNum}, SHA:${sha}" not to exist:\n` +
|
||||
existingFiles.map(file => ` - ${file}`).join('\n'),
|
||||
pass: existingFiles.length === 0,
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export const customMatchers = {
|
||||
toExistAsABuild: () => new ToExistAsABuild(),
|
||||
toExistAsAFile: () => new ToExistAsAFile(),
|
||||
toExistAsAnArtifact: () => new ToExistAsAnArtifact(),
|
||||
};
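A short sketch of how these matchers are wired into the specs that follow: register them in a beforeEach and assert on PR/SHA descriptors; by default a passing matcher also removes the file or build it found (values here are illustrative):

beforeEach(() => jasmine.addMatchers(customMatchers));

it('verifies and cleans up a created build', () => {
  expect({ prNum: 42, sha: '1'.repeat(40) }).toExistAsABuild();   // asserts existence, then removes
  expect({ prNum: 42, isPublic: false }).not.toExistAsABuild();
});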
|
@ -1,171 +0,0 @@
|
||||
/* tslint:disable:max-line-length */
|
||||
import * as nock from 'nock';
|
||||
import * as tar from 'tar-stream';
|
||||
import {gzipSync} from 'zlib';
|
||||
import {getEnvVar, Logger} from '../common/utils';
|
||||
import {BuildNums, PrNums, SHA} from './constants';
|
||||
|
||||
// We are using the `nock` library to fake responses to REST requests when testing.
|
||||
// This is necessary because the test preview-server runs as a separate node process from
|
||||
// the test harness, so we do not have direct access to the code (e.g. for mocking).
|
||||
// (See also 'lib/verify-setup/start-test-preview-server.ts'.)
|
||||
|
||||
// Each of the potential requests to an external API (e.g. Github or CircleCI) is mocked
|
||||
// below and returns a suitable response. This is quite complicated to set up, since the
|
||||
// response from, say, CircleCI will affect what request is made to, say, Github.
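For orientation, a minimal interceptor of the kind registered below: a persisted nock scope answers requests to a fake host, so the separately running preview server receives canned responses (host, path and body here are illustrative):

const exampleApi = nock('https://api.example.org').persist();
exampleApi.get('/repos/some-org/some-repo/issues/42').reply(200, {labels: [], user: {login: 'some-user'}});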
|
||||
|
||||
const logger = new Logger('mock-external-apis');
|
||||
|
||||
const log = (...args: any[]) => {
|
||||
// Filter out non-matching URL checks
|
||||
if (!/^matching.+: false$/.test(args[0])) {
|
||||
logger.log(...args);
|
||||
}
|
||||
};
|
||||
|
||||
const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
|
||||
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
|
||||
const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
|
||||
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
|
||||
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
|
||||
|
||||
const ACTIVE_TRUSTED_USER = 'active-trusted-user';
|
||||
const INACTIVE_TRUSTED_USER = 'inactive-trusted-user';
|
||||
const UNTRUSTED_USER = 'untrusted-user';
|
||||
|
||||
const BASIC_BUILD_INFO = {
|
||||
branch: `pull/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
|
||||
failed: false,
|
||||
reponame: AIO_GITHUB_REPO,
|
||||
username: AIO_GITHUB_ORGANIZATION,
|
||||
vcs_revision: SHA,
|
||||
};
|
||||
|
||||
const ISSUE_INFO_TRUSTED_LABEL = { labels: [{ name: AIO_TRUSTED_PR_LABEL }], user: { login: UNTRUSTED_USER } };
|
||||
const ISSUE_INFO_ACTIVE_TRUSTED_USER = { labels: [], user: { login: ACTIVE_TRUSTED_USER } };
|
||||
const ISSUE_INFO_INACTIVE_TRUSTED_USER = { labels: [], user: { login: INACTIVE_TRUSTED_USER } };
|
||||
const ISSUE_INFO_UNTRUSTED = { labels: [], user: { login: UNTRUSTED_USER } };
|
||||
const ACTIVE_STATE = { state: 'active' };
|
||||
const INACTIVE_STATE = { state: 'inactive' };
|
||||
|
||||
const TEST_TEAM_INFO = AIO_GITHUB_TEAM_SLUGS.map((slug, index) => ({ slug, id: index }));
|
||||
|
||||
const CIRCLE_CI_API_HOST = 'https://circleci.com';
|
||||
const CIRCLE_CI_TOKEN_PARAM = `circle-token=${AIO_CIRCLE_CI_TOKEN}`;
|
||||
const ARTIFACT_1 = { path: 'artifact-1', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-1`, _urlPath: '/artifacts/artifact-1' };
|
||||
const ARTIFACT_2 = { path: 'artifact-2', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-2`, _urlPath: '/artifacts/artifact-2' };
|
||||
const ARTIFACT_3 = { path: 'artifact-3', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-3`, _urlPath: '/artifacts/artifact-3' };
|
||||
const ARTIFACT_ERROR = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/error`, _urlPath: '/artifacts/error' };
|
||||
const ARTIFACT_404 = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/404`, _urlPath: '/artifacts/404' };
|
||||
const ARTIFACT_VALID_TRUSTED_USER = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/user`, _urlPath: '/artifacts/valid/user' };
|
||||
const ARTIFACT_VALID_TRUSTED_LABEL = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/label`, _urlPath: '/artifacts/valid/label' };
|
||||
const ARTIFACT_VALID_UNTRUSTED = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/untrusted`, _urlPath: '/artifacts/valid/untrusted' };
|
||||
|
||||
const CIRCLE_CI_BUILD_INFO_URL = `/api/v1.1/project/github/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}`;
|
||||
|
||||
const buildInfoUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}?${CIRCLE_CI_TOKEN_PARAM}`;
|
||||
const buildArtifactsUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}/artifacts?${CIRCLE_CI_TOKEN_PARAM}`;
|
||||
const buildInfo = (prNum: number) => ({ ...BASIC_BUILD_INFO, branch: `pull/${prNum}` });
|
||||
|
||||
const GITHUB_API_HOST = 'https://api.github.com';
|
||||
const GITHUB_ISSUES_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/issues`;
|
||||
const GITHUB_PULLS_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/pulls`;
|
||||
const GITHUB_TEAMS_URL = `/orgs/${AIO_GITHUB_ORGANIZATION}/teams`;
|
||||
|
||||
const getIssueUrl = (prNum: number) => `${GITHUB_ISSUES_URL}/${prNum}`;
|
||||
const getFilesUrl = (prNum: number, pageNum = 1) => `${GITHUB_PULLS_URL}/${prNum}/files?page=${pageNum}&per_page=100`;
|
||||
const getCommentUrl = (prNum: number) => `${getIssueUrl(prNum)}/comments`;
|
||||
const getTeamMembershipUrl = (teamId: number, username: string) => `/teams/${teamId}/memberships/${username}`;
|
||||
|
||||
const createArchive = (buildNum: number, prNum: number, sha: string) => {
|
||||
logger.log('createArchive', buildNum, prNum, sha);
|
||||
const pack = tar.pack();
|
||||
pack.entry({name: 'index.html'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /index.html`);
|
||||
pack.entry({name: 'foo/bar.js'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /foo/bar.js`);
|
||||
pack.finalize();
|
||||
const zip = gzipSync(pack.read());
|
||||
return zip;
|
||||
};
|
||||
|
||||
// Create request scopes
|
||||
const circleCiApi = nock(CIRCLE_CI_API_HOST).log(log).persist();
|
||||
const githubApi = nock(GITHUB_API_HOST).log(log).persist().matchHeader('Authorization', `token ${AIO_GITHUB_TOKEN}`);
|
||||
|
||||
//////////////////////////////
|
||||
|
||||
// GENERAL responses
|
||||
githubApi.get(GITHUB_TEAMS_URL + '?page=1&per_page=100').reply(200, TEST_TEAM_INFO);
|
||||
githubApi.post(getCommentUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200);
|
||||
|
||||
// BUILD_INFO errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_ERROR)).replyWithError('BUILD_INFO_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_404)).reply(404, 'BUILD_INFO_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_BUILD_FAILED)).reply(200, { ...BASIC_BUILD_INFO, failed: true });
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_ORG)).reply(200, { ...BASIC_BUILD_INFO, username: 'bad' });
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_REPO)).reply(200, { ...BASIC_BUILD_INFO, reponame: 'bad' });
|
||||
|
||||
// CHANGED FILE errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_ERROR)).reply(200, buildInfo(PrNums.CHANGED_FILES_ERROR));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_ERROR)).replyWithError('CHANGED_FILES_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_404)).reply(200, buildInfo(PrNums.CHANGED_FILES_404));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_404)).reply(404, 'CHANGED_FILES_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_NONE)).reply(200, buildInfo(PrNums.CHANGED_FILES_NONE));
|
||||
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_NONE)).reply(200, []);
|
||||
|
||||
// ARTIFACT URL errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).replyWithError('BUILD_ARTIFACTS_ERROR');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(404, 'BUILD_ARTIFACTS_404');
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, []);
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, [ARTIFACT_1, ARTIFACT_2, ARTIFACT_3]);
|
||||
|
||||
// ARTIFACT DOWNLOAD errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, [ARTIFACT_ERROR]);
|
||||
circleCiApi.get(ARTIFACT_ERROR._urlPath).replyWithError(ARTIFACT_ERROR._urlPath);
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, [ARTIFACT_404]);
|
||||
circleCiApi.get(ARTIFACT_404._urlPath).reply(404, ARTIFACT_404._urlPath);
|
||||
|
||||
// TRUST CHECK errors
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ERROR));
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ERROR)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ERROR)).replyWithError('TRUST_CHECK_ERROR');
|
||||
|
||||
// ACTIVE TRUSTED USER response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
circleCiApi.get(ARTIFACT_VALID_TRUSTED_USER._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_ACTIVE_TRUSTED_USER);
|
||||
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
|
||||
|
||||
// TRUSTED LABEL response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [ARTIFACT_VALID_TRUSTED_LABEL]);
|
||||
circleCiApi.get(ARTIFACT_VALID_TRUSTED_LABEL._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_TRUSTED_LABEL, PrNums.TRUST_CHECK_TRUSTED_LABEL, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, ISSUE_INFO_TRUSTED_LABEL);
|
||||
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);
|
||||
|
||||
// INACTIVE TRUSTED USER response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_INACTIVE_TRUSTED_USER);
|
||||
githubApi.get(getTeamMembershipUrl(0, INACTIVE_TRUSTED_USER)).reply(200, INACTIVE_STATE);
|
||||
|
||||
// UNTRUSTED response
|
||||
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, buildInfo(PrNums.TRUST_CHECK_UNTRUSTED));
|
||||
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, [{ filename: 'aio/a' }]);
|
||||
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, [ARTIFACT_VALID_UNTRUSTED]);
|
||||
circleCiApi.get(ARTIFACT_VALID_UNTRUSTED._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_UNTRUSTED, PrNums.TRUST_CHECK_UNTRUSTED, SHA));
|
||||
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, ISSUE_INFO_UNTRUSTED);
|
||||
githubApi.get(getTeamMembershipUrl(0, UNTRUSTED_USER)).reply(404);
|
@ -1,405 +0,0 @@
|
||||
// Imports
|
||||
import * as path from 'path';
|
||||
import {rm} from 'shelljs';
|
||||
import {AIO_BUILDS_DIR, AIO_NGINX_HOSTNAME, AIO_NGINX_PORT_HTTP, AIO_NGINX_PORT_HTTPS} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {PrNums} from './constants';
|
||||
import {helper as h} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe(`nginx`, () => {
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
it('should redirect HTTP to HTTPS', done => {
|
||||
const httpHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTP}`;
|
||||
const httpsHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTPS}`;
|
||||
const urlMap = {
|
||||
[`http://${httpHost}/`]: `https://${httpsHost}/`,
|
||||
[`http://${httpHost}/foo`]: `https://${httpsHost}/foo`,
|
||||
[`http://foo.${httpHost}/`]: `https://foo.${httpsHost}/`,
|
||||
};
|
||||
|
||||
const verifyRedirection = (httpUrl: string) => h.runCmd(`curl -i ${httpUrl}`).then(result => {
|
||||
h.verifyResponse(307)(result);
|
||||
|
||||
const headers = result.stdout.split(/(?:\r?\n){2,}/)[0];
|
||||
expect(headers).toContain(`Location: ${urlMap[httpUrl]}`);
|
||||
});
|
||||
|
||||
Promise.
|
||||
all(Object.keys(urlMap).map(verifyRedirection)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`(on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = AIO_NGINX_HOSTNAME;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = 9;
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
const shortSha9 = computeShortSha(sha9);
|
||||
const shortSha0 = computeShortSha(sha0);
|
||||
|
||||
|
||||
describe(`pr<pr>-<sha>.${host}/*`, () => {
|
||||
|
||||
describe('(for public builds)', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyBuild(pr, sha9);
|
||||
h.createDummyBuild(pr, sha0);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
expect({ prNum: pr, sha: sha9 }).toExistAsABuild();
|
||||
expect({ prNum: pr, sha: sha0 }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js', done => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(h.verifyResponse(200, bodyRegex));
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 403 for directories', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/`).then(h.verifyResponse(403)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo`).then(h.verifyResponse(403)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths to files', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown PRs/SHAs', done => {
|
||||
const otherPr = 54321;
|
||||
const otherShortSha = computeShortSha('8'.repeat(40));
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}9.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherShortSha}.${host}`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the subdomain format is wrong', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://prx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://xx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://p${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://r${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}_${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const bodyRegex9 = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
const bodyRegex0 = new RegExp(`^PR: ${pr} | SHA: ${sha0} | File: /index\\.html$`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('(for hidden builds)', () => {
|
||||
|
||||
it('should respond with 404 for any file or directory', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for any file or directory (for legacy builds)', async () => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false, false, true);
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const baseUrl = `${scheme}://${host}/can-have-public-preview`;
|
||||
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX POST ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX GET ${baseUrl}/${PrNums.CHANGED_FILES_ERROR}`).
|
||||
then(h.verifyResponse(500, /CHANGED_FILES_ERROR/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const cmdPrefix = `curl -iLX GET ${baseUrl}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}-foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}nfoo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/42/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/f00`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
|
||||
it('should disallow non-POST requests', done => {
|
||||
const url = `${scheme}://${host}/circle-build`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
|
||||
then(h.verifyResponse(400, /Incorrect body content. Expected JSON/)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncircle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build-foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-buildnfoo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/pr`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/42`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const url = `${scheme}://${host}/pr-updated`;
|
||||
|
||||
|
||||
it('should disallow non-POST requests', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', done => {
|
||||
const cmdPrefix = `curl -iLX POST --header "Content-Type: application/json"`;
|
||||
|
||||
const cmd1 = `${cmdPrefix} ${url}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(cmd1).then(h.verifyResponse(400, /Missing or empty 'number' field/)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
beforeEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
return h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond with 404 for unknown URLs (even if the resource exists)', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html', 'foo'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
rm('-r', absFilePath);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
});
|
@ -1,569 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import {join} from 'path';
|
||||
import {AIO_PREVIEW_SERVER_HOSTNAME, AIO_PREVIEW_SERVER_PORT, AIO_WWW_USER} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {ALT_SHA, BuildNums, PrNums, SHA, SIMILAR_SHA} from './constants';
|
||||
import {helper as h, makeCurl, payload} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe('preview-server', () => {
|
||||
const hostname = AIO_PREVIEW_SERVER_HOSTNAME;
|
||||
const port = AIO_PREVIEW_SERVER_PORT;
|
||||
const host = `http://${hostname}:${port}`;
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const curl = makeCurl(`${host}/can-have-public-preview`, {
|
||||
defaultData: null,
|
||||
defaultExtraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
|
||||
defaultHeaders: [],
|
||||
defaultMethod: 'GET',
|
||||
});
|
||||
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'POST'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `-foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `nfoo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}/foo`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/f00'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking for significant file changes fails', async () => {
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_404}`}).then(h.verifyResponse(500, /CHANGED_FILES_404/)),
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_ERROR}`}).then(h.verifyResponse(500, /CHANGED_FILES_ERROR/)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if no significant files were touched', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'No significant files touched.',
|
||||
});
|
||||
|
||||
await curl({extraPath: `/${PrNums.CHANGED_FILES_NONE}`}).then(h.verifyResponse(200, expectedResponse));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking "trusted" status fails', async () => {
|
||||
await curl({extraPath: `/${PrNums.TRUST_CHECK_ERROR}`}).then(h.verifyResponse(500, 'TRUST_CHECK_ERROR'));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if the PR is not automatically verifiable as "trusted"', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'Not automatically verifiable as \\"trusted\\".',
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_UNTRUSTED}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (true) if the PR can have a public preview', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: true,
|
||||
reason: null,
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_TRUSTED_LABEL}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
|
||||
const curl = makeCurl(`${host}/circle-build`);
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
await Promise.all([
|
||||
curl({url: `${host}/foo/circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/foo-circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/fooncircle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build-foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-buildnfoo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/pr`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build42`}).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 400 if the body is not valid', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: {} } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1 } } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1, build_parameters: {} } } }).then(h.verifyResponse(400)),
|
||||
curl(payload(0)).then(h.verifyResponse(400)),
|
||||
curl(payload(-1)).then(h.verifyResponse(400)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI API request errors', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_INFO_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if the build on CircleCI failed', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_BUILD_FAILED)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github org from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_ORG)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github repo from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_REPO)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github files API errors', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.CHANGED_FILES_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if no significant files are changed by the PR', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_NONE)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI artifact API fails', async () => {
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_404)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_EMPTY)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_MISSING)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if fetching the artifact errors', async () => {
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the GH trusted API fails', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it('should respond with 201 if a new public build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER))
|
||||
.then(h.verifyResponse(201));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should respond with 202 if a new private build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_UNTRUSTED)).then(h.verifyResponse(202));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_UNTRUSTED, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
[true, false].forEach(isPublic => {
|
||||
const build = isPublic ? BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const prNum = isPublic ? PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
const label = isPublic ? 'public' : 'non-public';
|
||||
const overwriteRe = RegExp(`^Request to overwrite existing ${label} directory`);
|
||||
const statusCode = isPublic ? 201 : 202;
|
||||
|
||||
describe(`for ${label} builds`, () => {
|
||||
|
||||
it('should extract the contents of the build artifact', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /index.html`);
|
||||
expect(h.readBuildFile(prNum, SHA, 'foo/bar.js', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /foo/bar.js`);
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should create files/directories owned by '${AIO_WWW_USER}'`, async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
const { stdout: allFiles } = await h.runCmd(`find ${shaDir}`);
|
||||
const { stdout: userFiles } = await h.runCmd(`find ${shaDir} -user ${AIO_WWW_USER}`);
|
||||
|
||||
expect(userFiles).toBe(allFiles);
|
||||
expect(userFiles).toContain(shaDir);
|
||||
expect(userFiles).toContain(join(shaDir, 'index.html'));
|
||||
expect(userFiles).toContain(join(shaDir, 'foo', 'bar.js'));
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should delete the build artifact file', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, SHA }).not.toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should make the build directory non-writable', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||
const isNotWritable = (fileOrDir: string) => {
|
||||
const mode = fs.statSync(fileOrDir).mode;
|
||||
// tslint:disable-next-line: no-bitwise
|
||||
return !(mode & parseInt('222', 8));
|
||||
};
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
expect(isNotWritable(shaDir)).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'index.html'))).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'foo', 'bar.js'))).toBe(true);
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
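A worked note on the permission check above: 0o222 (i.e. parseInt('222', 8)) is the mask of the owner, group and other write bits, so a file counts as non-writable only when mode & 0o222 is zero. An equivalent stand-alone sketch:

const writeMask = 0o222;                                     // -w--w--w-
const canAnyoneWrite = (p: string) => (fs.statSync(p).mode & writeMask) !== 0;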
|
||||
|
||||
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)',
|
||||
async () => {
|
||||
// It is possible that 40-chars long build directories exist, if they had been deployed
|
||||
// before implementing the shorter build directory names. In that case, we don't want the
|
||||
// second (shorter) name to be considered the same as the old one (even if they originate
|
||||
// from the same SHA).
|
||||
|
||||
h.createDummyBuild(prNum, SHA, isPublic, false, true);
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic, true);
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, false)).toContain('index.html');
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: false }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds`, async () => {
|
||||
// Set up a build already in place.
|
||||
h.createDummyBuild(prNum, SHA, isPublic);
|
||||
// Distinguish this build from the downloaded one.
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds (even if the SHA is different)`, async () => {
|
||||
// Since only the first few characters of the SHA are used, it is possible for two different
|
||||
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
|
||||
// overwrite the first.
|
||||
expect(SIMILAR_SHA).not.toEqual(SHA);
|
||||
expect(computeShortSha(SIMILAR_SHA)).toEqual(computeShortSha(SHA));
|
||||
h.createDummyBuild(prNum, SIMILAR_SHA, isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toContain('index.html');
|
||||
h.writeBuildFile(prNum, SIMILAR_SHA, 'index.html', 'My content', isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic, sha: SIMILAR_SHA }).toExistAsABuild();
|
||||
expect({ prNum, sha: SIMILAR_SHA }).toExistAsAnArtifact();
|
||||
});
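The collision exercised above exists because computeShortSha keeps only a fixed-length prefix of the full SHA. A sketch of the idea; the real helper lives in '../common/utils' and its exact prefix length is not shown here:

// Hypothetical prefix length, for illustration only.
const SHORT_SHA_LEN = 7;
const computeShortShaSketch = (sha: string) => sha.slice(0, SHORT_SHA_LEN);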
|
||||
|
||||
it('should only delete the SHA directory on error (for existing PR)', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic, sha: SHA }).not.toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
describe('when the PR\'s visibility has changed', () => {
|
||||
|
||||
it('should update the PR\'s visibility', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds (but keep the updated visibility)', async () => {
|
||||
h.createDummyBuild(prNum, SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic: !isPublic }).not.toExistAsABuild();
|
||||
// Since it errored, we didn't clean up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
|
||||
it('should reject the request if it fails to update the PR\'s visibility', async () => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
|
||||
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(prNum, !isPublic)}' ` +
|
||||
`to existing directory '${h.getPrDir(prNum, isPublic)}'.`);
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, errorRegex));
|
||||
|
||||
expect({ prNum, isPublic }).not.toExistAsABuild();
|
||||
|
||||
// The bad folders should have been deleted
|
||||
expect({ prNum, sha: ALT_SHA, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, sha: ALT_SHA, isPublic: !isPublic }).toExistAsABuild();
|
||||
|
||||
// Since it errored, we didn't clean up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const curl = makeCurl(`${host}/pr-updated`);
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a payload', async () => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400, bodyRegex)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
    it('should respond with 400 for requests without a \'number\' field', async () => {
      const bodyRegex = /^Missing or empty 'number' field in request/;

      await Promise.all([
        curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
        curl({ data: { number: null } }).then(h.verifyResponse(400, bodyRegex)),
      ]);
    });


    it('should reject requests for which checking the PR visibility fails', async () => {
      await curl({ data: { number: PrNums.TRUST_CHECK_ERROR } }).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
    });


    it('should respond with 404 for unknown paths', done => {
      const mockPayload = JSON.stringify({ number: 1 });  // MockExternalApiFlags.TRUST_CHECK_ACTIVE_TRUSTED_USER
      const cmdPrefix = `curl -iLX POST --data "${mockPayload}" ${host}`;

      Promise.all([
        h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
        h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
        h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
        h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
        h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
        h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
      ]).then(done);
    });


    it('should do nothing if PR\'s visibility is already up-to-date', async () => {
      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

      const checkVisibilities = (remove: boolean) => {
        // Public build is already public.
        expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild(remove);
        expect({ prNum: publicPr, isPublic: true }).toExistAsABuild(remove);
        // Hidden build is already hidden.
        expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild(remove);
        expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild(remove);
      };

      h.createDummyBuild(publicPr, SHA, true);
      h.createDummyBuild(hiddenPr, SHA, false);
      checkVisibilities(false);

      await Promise.all([
        curl({ data: { number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
        curl({ data: { number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
      ]);

      // Visibilities should not have changed, because the specified action could not have triggered a change.
      checkVisibilities(true);
    });


    it('should do nothing if \'action\' implies no visibility change', async () => {
      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

      const checkVisibilities = (remove: boolean) => {
        // Public build is hidden atm.
        expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(remove);
        expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(remove);
        // Hidden build is public atm.
        expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(remove);
        expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(remove);
      };

      h.createDummyBuild(publicPr, SHA, false);
      h.createDummyBuild(hiddenPr, SHA, true);
      checkVisibilities(false);

      await Promise.all([
        curl({ data: { number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
        curl({ data: { number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
      ]);

      // Visibilities should not have changed, because the specified action could not have triggered a change.
      checkVisibilities(true);
    });

    describe('when the visibility has changed', () => {
      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

      beforeEach(() => {
        // Create initial PR builds with the opposite visibilities to the ones that will be reported:
        // - The now public PR was previously hidden.
        // - The now hidden PR was previously public.
        h.createDummyBuild(publicPr, SHA, false);
        h.createDummyBuild(hiddenPr, SHA, true);

        expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(false);
        expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(false);
        expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(false);
        expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(false);
      });
      afterEach(() => {
        // Expect PRs' visibility to have been updated:
        // - The public PR should be actually public (previously it was hidden).
        // - The hidden PR should be actually hidden (previously it was public).
        expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
        expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
        expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
        expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
      });


      it('should update the PR\'s visibility (action: undefined)', async () => {
        await Promise.all([
          curl({ data: { number: +publicPr } }).then(h.verifyResponse(200)),
          curl({ data: { number: +hiddenPr } }).then(h.verifyResponse(200)),
        ]);
      });


      it('should update the PR\'s visibility (action: labeled)', async () => {
        await Promise.all([
          curl({ data: { number: +publicPr, action: 'labeled' } }).then(h.verifyResponse(200)),
          curl({ data: { number: +hiddenPr, action: 'labeled' } }).then(h.verifyResponse(200)),
        ]);
      });


      it('should update the PR\'s visibility (action: unlabeled)', async () => {
        await Promise.all([
          curl({ data: { number: +publicPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
          curl({ data: { number: +hiddenPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
        ]);
      });

    });

  });


  describe(`${host}/*`, () => {

    it('should respond with 404 for requests to unknown URLs', done => {
      const bodyRegex = /^Unknown resource/;

      Promise.all([
        h.runCmd(`curl -iL ${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iL ${host}/`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iL ${host}`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iLX PUT ${host}`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iLX POST ${host}`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iLX PATCH ${host}`).then(h.verifyResponse(404, bodyRegex)),
        h.runCmd(`curl -iLX DELETE ${host}`).then(h.verifyResponse(404, bodyRegex)),
      ]).then(done);
    });

  });
});
@ -1,269 +0,0 @@
// Imports
import {AIO_NGINX_HOSTNAME} from '../common/env-variables';
import {computeShortSha} from '../common/utils';
import {ALT_SHA, BuildNums, PrNums, SHA} from './constants';
import {helper as h, makeCurl, payload} from './helper';
import {customMatchers} from './jasmine-custom-matchers';

// Tests
h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
  const hostname = AIO_NGINX_HOSTNAME;
  const host = `${hostname}:${port}`;
  const curlPrUpdated = makeCurl(`${scheme}://${host}/pr-updated`);

  const getFile = (pr: number, sha: string, file: string) =>
    h.runCmd(`curl -iL ${scheme}://pr${pr}-${computeShortSha(sha)}.${host}/${file}`);
  const prUpdated = (prNum: number, action?: string) => curlPrUpdated({ data: { number: prNum, action } });
  const circleBuild = makeCurl(`${scheme}://${host}/circle-build`);

  beforeEach(() => {
    jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;
    jasmine.addMatchers(customMatchers);
  });
  afterEach(() => h.cleanUp());
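
  // NOTE: `makeCurl(url)` (from './helper') returns a function that POSTs a JSON body to `url` and
  // resolves with the raw HTTP response text. A minimal, hypothetical sketch of the behaviour these
  // tests assume (the real helper may add extra headers or options):
  //
  //   const makeCurl = (url: string) => ({ data }: { data: string | object }) => {
  //     const json = (typeof data === 'string') ? data : JSON.stringify(data);
  //     return h.runCmd(`curl -iLX POST --header "Content-Type: application/json" --data '${json}' ${url}`);
  //   };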

  describe('for a new/non-existing PR', () => {

    it('should be able to create and serve a public preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

      const regexPrefix = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
      const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
      const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);

      await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
      await Promise.all([
        getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
      ]);

      expect({ prNum: PR }).toExistAsABuild();
      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
    });


    it('should be able to create but not serve a hidden preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
      const PR = PrNums.TRUST_CHECK_UNTRUSTED;

      await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
      await Promise.all([
        getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
      ]);

      expect({ prNum: PR }).not.toExistAsABuild();
      expect({ prNum: PR, isPublic: false }).toExistAsABuild();
    });


    it('should reject if verification fails', async () => {
      const BUILD = BuildNums.TRUST_CHECK_ERROR;
      const PR = PrNums.TRUST_CHECK_ERROR;

      await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
      expect({ prNum: PR }).toExistAsAnArtifact();
      expect({ prNum: PR }).not.toExistAsABuild();
      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
    });


    it('should be able to notify that a PR has been updated (and do nothing)', async () => {
      await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(200));
      // The PR should still not exist.
      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).not.toExistAsABuild();
      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).not.toExistAsABuild();
    });

  });
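
  // NOTE: `toExistAsABuild` / `toExistAsAnArtifact` are the custom matchers imported from
  // './jasmine-custom-matchers' and registered in the `beforeEach()` above. As a rough, hypothetical
  // sketch of the semantics these tests assume for `toExistAsABuild` (the real matcher may differ,
  // e.g. it also appears to clean up the checked directory unless called with `false`):
  //
  //   toExistAsABuild: () => ({
  //     compare({ prNum, sha = SHA, isPublic = true }: { prNum: number, sha?: string, isPublic?: boolean }) {
  //       const buildDir = `${h.getPrDir(prNum, isPublic)}/${computeShortSha(sha)}`;
  //       const pass = fs.existsSync(buildDir);
  //       return { pass, message: `Expected build directory '${buildDir}' ${pass ? 'not ' : ''}to exist.` };
  //     },
  //   }),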

  describe('for an existing PR', () => {

    it('should be able to create and serve a public preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

      const regexPrefix1 = `^PR: ${PR} \\| SHA: ${ALT_SHA} \\| File:`;
      const idxContentRegex1 = new RegExp(`${regexPrefix1} \\/index\\.html$`);
      const barContentRegex1 = new RegExp(`${regexPrefix1} \\/foo\\/bar\\.js$`);

      const regexPrefix2 = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
      const idxContentRegex2 = new RegExp(`${regexPrefix2} \\/index\\.html$`);
      const barContentRegex2 = new RegExp(`${regexPrefix2} \\/foo\\/bar\\.js$`);

      h.createDummyBuild(PR, ALT_SHA);
      await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
      await Promise.all([
        getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex1)),
        getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex1)),
        getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex2)),
        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex2)),
      ]);

      expect({ prNum: PR, sha: SHA }).toExistAsABuild();
      expect({ prNum: PR, sha: ALT_SHA }).toExistAsABuild();
    });


    it('should be able to create but not serve a hidden preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
      const PR = PrNums.TRUST_CHECK_UNTRUSTED;

      h.createDummyBuild(PR, ALT_SHA, false);
      await circleBuild(payload(BUILD)).then(h.verifyResponse(202));

      await Promise.all([
        getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(404)),
        getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(404)),
        getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
        getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
      ]);

      expect({ prNum: PR, sha: SHA }).not.toExistAsABuild();
      expect({ prNum: PR, sha: SHA, isPublic: false }).toExistAsABuild();
      expect({ prNum: PR, sha: ALT_SHA }).not.toExistAsABuild();
      expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
    });


    it('should reject if verification fails', async () => {
      const BUILD = BuildNums.TRUST_CHECK_ERROR;
      const PR = PrNums.TRUST_CHECK_ERROR;

      h.createDummyBuild(PR, ALT_SHA, false);

      await circleBuild(payload(BUILD)).then(h.verifyResponse(500));

      expect({ prNum: PR }).toExistAsAnArtifact();
      expect({ prNum: PR }).not.toExistAsABuild();
      expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
      expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
    });


    it('should not be able to overwrite an existing public preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;

      const regexPrefix = `^PR: ${PR} \\| SHA: ${SHA} \\| File:`;
      const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
      const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);

      h.createDummyBuild(PR, SHA);

      await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
      await Promise.all([
        getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
        getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
      ]);

      expect({ prNum: PR }).toExistAsAnArtifact();
      expect({ prNum: PR }).toExistAsABuild();
    });


    it('should not be able to overwrite an existing hidden preview', async () => {
      const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
      const PR = PrNums.TRUST_CHECK_UNTRUSTED;
      h.createDummyBuild(PR, SHA, false);

      await circleBuild(payload(BUILD)).then(h.verifyResponse(409));

      expect({ prNum: PR }).toExistAsAnArtifact();
      expect({ prNum: PR, isPublic: false }).toExistAsABuild();
    });


    it('should be able to request re-checking visibility (if outdated)', async () => {
      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

      h.createDummyBuild(publicPr, SHA, false);
      h.createDummyBuild(hiddenPr, SHA, true);

      // PR visibilities are outdated (i.e. the opposite of what they should be).
      expect({ prNum: publicPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
      expect({ prNum: publicPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
      expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
      expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).toExistAsABuild(false);

      await Promise.all([
        prUpdated(publicPr).then(h.verifyResponse(200)),
        prUpdated(hiddenPr).then(h.verifyResponse(200)),
      ]);

      // PR visibilities should have been updated.
      expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
      expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
      expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
      expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
    });


    it('should be able to request re-checking visibility (if up-to-date)', async () => {
      const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
      const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;

      h.createDummyBuild(publicPr, SHA, true);
      h.createDummyBuild(hiddenPr, SHA, false);

      // PR visibilities are already up-to-date.
      expect({ prNum: publicPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
      expect({ prNum: publicPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
      expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
      expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);

      await Promise.all([
        prUpdated(publicPr).then(h.verifyResponse(200)),
        prUpdated(hiddenPr).then(h.verifyResponse(200)),
      ]);

      // PR visibilities are still up-to-date.
      expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
      expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
      expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
      expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
    });


    it('should reject a request if re-checking visibility fails', async () => {
      const errorPr = PrNums.TRUST_CHECK_ERROR;

      h.createDummyBuild(errorPr, SHA, true);

      expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild(false);
      expect({ prNum: errorPr, isPublic: true }).toExistAsABuild(false);

      await prUpdated(errorPr).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));

      // PR visibility should not have been updated.
      expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild();
      expect({ prNum: errorPr, isPublic: true }).toExistAsABuild();
    });


    it('should reject a request if updating visibility fails', async () => {
      // One way to cause an error is to have both a public and a hidden directory for the same PR.
      h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, false);
      h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, true);

      const hiddenPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, false);
      const publicPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, true);
      const bodyRegex = new RegExp(`Request to move '${hiddenPrDir}' to existing directory '${publicPrDir}'`);

      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild(false);
      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild(false);

      await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(409, bodyRegex));

      // PR visibility should not have been updated.
      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild();
      expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild();
    });

  });

}));
@ -1,2 +0,0 @@
import '../preview-server';
import './mock-external-apis';
@ -1,30 +0,0 @@
declare module 'tar-stream' {

  import {Readable, Writable} from 'stream';

  export interface Pack extends Readable {
    entry(header: Header, callback?: (err?: any) => {}): Writable;
    entry(header: Header, contents: string, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: Buffer, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: string|Buffer, callback?: (err?: any) => {}): Writable;
    finalize(): void;
    destroy(err: any): void;
  }

  export interface Header {
    name: string;
    mode?: number;
    uid?: number;
    gid?: number;
    size?: number;
    mtime?: Date;
    type?: string;  // e.g. 'file', 'directory', 'symlink'
    linkname?: string;
    uname?: string;
    gname?: string;
    devmajor?: number;
    devminor?: number;
  }

  export function pack(): Pack;
}
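
For reference, the `Pack` interface declared above mirrors the API of the `tar-stream` package: `pack()` returns a readable stream, `entry()` appends a file with the given header and contents, and `finalize()` ends the archive. A small illustrative usage sketch (file names and contents are made up):

import {createWriteStream} from 'fs';
import {pack} from 'tar-stream';

const tarball = pack();                                        // `Pack` extends `Readable`.
tarball.entry({ name: 'index.html' }, '<h1>Hello</h1>');       // Append an entry from an in-memory string.
tarball.entry({ name: 'foo/bar.js' }, 'console.log("bar");');
tarball.finalize();                                            // No more entries; end the archive.
tarball.pipe(createWriteStream('archive.tar'));                // Consume the stream, e.g. write it to disk.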