Compare commits
1 commit
master ... mhevery-pa

Commit: 27a2515c74
.bazelignore
@@ -1,97 +0,0 @@
# Bazel does not yet support wildcards or other .gitignore semantics for
# .bazelignore. Two issues for this feature request are outstanding:
# https://github.com/bazelbuild/bazel/issues/7093
# https://github.com/bazelbuild/bazel/issues/8106
.git
node_modules
dist
aio/content
aio/node_modules
aio/tools/examples/shared/node_modules
packages/bazel/node_modules
integration/bazel/bazel-bazel
integration/bazel/bazel-bin
integration/bazel/bazel-out
integration/bazel/bazel-testlogs
integration/bazel-schematics/demo
# All integration test node_modules folders
integration/bazel/node_modules
integration/bazel-schematics/node_modules
integration/cli-hello-world/node_modules
integration/cli-hello-world-ivy-compat/node_modules
integration/cli-hello-world-ivy-i18n/node_modules
integration/cli-hello-world-ivy-minimal/node_modules
integration/cli-hello-world-lazy/node_modules
integration/cli-hello-world-lazy-rollup/node_modules
integration/dynamic-compiler/node_modules
integration/hello_world__closure/node_modules
integration/hello_world__systemjs_umd/node_modules
integration/i18n/node_modules
integration/injectable-def/node_modules
integration/ivy-i18n/node_modules
integration/language_service_plugin/node_modules
integration/ng_elements/node_modules
integration/ng_elements_schematics/node_modules
integration/ng_update/node_modules
integration/ng_update_migrations/node_modules
integration/ngcc/node_modules
integration/platform-server/node_modules
integration/service-worker-schema/node_modules
integration/side-effects/node_modules
integration/terser/node_modules
integration/typings_test_ts36/node_modules
integration/typings_test_ts37/node_modules
# All integration test .yarn_local_cache folders
integration/bazel/.yarn_local_cache
integration/bazel-schematics/.yarn_local_cache
integration/cli-hello-world/.yarn_local_cache
integration/cli-hello-world-ivy-compat/.yarn_local_cache
integration/cli-hello-world-ivy-i18n/.yarn_local_cache
integration/cli-hello-world-ivy-minimal/.yarn_local_cache
integration/cli-hello-world-lazy/.yarn_local_cache
integration/cli-hello-world-lazy-rollup/.yarn_local_cache
integration/dynamic-compiler/.yarn_local_cache
integration/hello_world__closure/.yarn_local_cache
integration/hello_world__systemjs_umd/.yarn_local_cache
integration/i18n/.yarn_local_cache
integration/injectable-def/.yarn_local_cache
integration/ivy-i18n/.yarn_local_cache
integration/language_service_plugin/.yarn_local_cache
integration/ng_elements/.yarn_local_cache
integration/ng_elements_schematics/.yarn_local_cache
integration/ng_update/.yarn_local_cache
integration/ng_update_migrations/.yarn_local_cache
integration/ngcc/.yarn_local_cache
integration/platform-server/.yarn_local_cache
integration/service-worker-schema/.yarn_local_cache
integration/side-effects/.yarn_local_cache
integration/terser/.yarn_local_cache
integration/typings_test_ts36/.yarn_local_cache
integration/typings_test_ts37/.yarn_local_cache
# All integration test NPM_PACKAGE_MANIFEST.json folders
integration/bazel/NPM_PACKAGE_MANIFEST.json
integration/bazel-schematics/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-compat/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-ivy-minimal/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy/NPM_PACKAGE_MANIFEST.json
integration/cli-hello-world-lazy-rollup/NPM_PACKAGE_MANIFEST.json
integration/dynamic-compiler/NPM_PACKAGE_MANIFEST.json
integration/hello_world__closure/NPM_PACKAGE_MANIFEST.json
integration/hello_world__systemjs_umd/NPM_PACKAGE_MANIFEST.json
integration/i18n/NPM_PACKAGE_MANIFEST.json
integration/injectable-def/NPM_PACKAGE_MANIFEST.json
integration/ivy-i18n/NPM_PACKAGE_MANIFEST.json
integration/language_service_plugin/NPM_PACKAGE_MANIFEST.json
integration/ng_elements/NPM_PACKAGE_MANIFEST.json
integration/ng_elements_schematics/NPM_PACKAGE_MANIFEST.json
integration/ng_update/NPM_PACKAGE_MANIFEST.json
integration/ng_update_migrations/NPM_PACKAGE_MANIFEST.json
integration/ngcc/NPM_PACKAGE_MANIFEST.json
integration/platform-server/NPM_PACKAGE_MANIFEST.json
integration/service-worker-schema/NPM_PACKAGE_MANIFEST.json
integration/side-effects/NPM_PACKAGE_MANIFEST.json
integration/terser/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts36/NPM_PACKAGE_MANIFEST.json
integration/typings_test_ts37/NPM_PACKAGE_MANIFEST.json
.bazelrc
@@ -1,146 +0,0 @@
# Enable debugging tests with --config=debug
test:debug --test_arg=--node_options=--inspect-brk --test_output=streamed --test_strategy=exclusive --test_timeout=9999 --nocache_test_results

###############################
# Filesystem interactions     #
###############################

# Create symlinks in the project:
# - dist/bin for outputs
# - dist/testlogs, dist/genfiles
# - bazel-out
# NB: bazel-out should be excluded from the editor configuration.
# The checked-in /.vscode/settings.json does this for VSCode.
# Other editors may require manual config to ignore this directory.
# In the past, we saw a problem where VSCode traversed a massive tree, opening file handles and
# eventually hit a surprising failure with auto-discovery of the C++ toolchain in
# MacOS High Sierra.
# See https://github.com/bazelbuild/bazel/issues/4603
build --symlink_prefix=dist/

# Turn off legacy external runfiles
build --nolegacy_external_runfiles
run --nolegacy_external_runfiles
test --nolegacy_external_runfiles

# Turn on --incompatible_strict_action_env which was on by default
# in Bazel 0.21.0 but turned off again in 0.22.0. Follow
# https://github.com/bazelbuild/bazel/issues/7026 for more details.
# This flag is needed so that the bazel cache is not invalidated
# when running bazel via `yarn bazel`.
# See https://github.com/angular/angular/issues/27514.
build --incompatible_strict_action_env
run --incompatible_strict_action_env
test --incompatible_strict_action_env

# Do not build runfile trees by default. If an execution strategy relies on runfile
# symlink trees, the tree is created on-demand. See: https://github.com/bazelbuild/bazel/issues/6627
# and https://github.com/bazelbuild/bazel/commit/03246077f948f2790a83520e7dccc2625650e6df
build --nobuild_runfile_links

###############################
# Release support             #
# Turn on these settings with #
# --config=release            #
###############################

# Releases should always be stamped with version control info
# This command assumes node on the path and is a workaround for
# https://github.com/bazelbuild/bazel/issues/4802
build:release --workspace_status_command="yarn -s ng-dev release build-env-stamp"
build:release --stamp

###############################
# Output                      #
###############################

# A more useful default output mode for bazel query
# Prints eg. "ng_module rule //foo:bar" rather than just "//foo:bar"
query --output=label_kind

# By default, failing tests don't print any output; it goes to the log file
test --test_output=errors

################################
# Settings for CircleCI        #
################################

# Bazel flags for CircleCI are in /.circleci/bazel.linux.rc and /.circleci/bazel.windows.rc

##################################
# Settings for integration tests #
##################################

# Trick bazel into treating BUILD files under integration/bazel as being regular files
# This lets us glob() up all the files inside this integration test to make them inputs to tests
# (Note, we cannot use common --deleted_packages because the bazel version command doesn't support it)
build --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e
query --deleted_packages=integration/bazel,integration/bazel/src,integration/bazel/src/hello-world,integration/bazel/test,integration/bazel/test/e2e

################################
# Temporary Settings for Ivy   #
################################
# To determine if the compiler used should be Ivy instead of ViewEngine, one can use `--config=ivy`
# on any bazel target. This is a temporary flag until the codebase is permanently switched to Ivy.
build --define=angular_ivy_enabled=False

build:view-engine --define=angular_ivy_enabled=False
build:ivy --define=angular_ivy_enabled=True

##################################
# Remote Build Execution support #
# Turn on these settings with    #
# --config=remote                #
##################################

# The following --define=EXECUTOR=remote will be able to be removed
# once https://github.com/bazelbuild/bazel/issues/7254 is fixed
build:remote --define=EXECUTOR=remote

# Set a higher timeout value, just in case.
build:remote --remote_timeout=600

# Increase the default number of jobs by 50% because our build has lots of
# parallelism
build:remote --jobs=150
build:remote --google_default_credentials

# Force remote executions to consider the entire run as linux
build:remote --cpu=k8
build:remote --host_cpu=k8

# Toolchain and platform related flags
build:remote --host_javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --javabase=@rbe_ubuntu1604_angular//java:jdk
build:remote --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
build:remote --crosstool_top=@rbe_ubuntu1604_angular//cc:toolchain
build:remote --extra_toolchains=@rbe_ubuntu1604_angular//config:cc-toolchain
build:remote --extra_execution_platforms=//tools:rbe_ubuntu1604-angular
build:remote --host_platform=//tools:rbe_ubuntu1604-angular
build:remote --platforms=//tools:rbe_ubuntu1604-angular

# Remote instance and caching
build:remote --remote_instance_name=projects/internal-200822/instances/default_instance
build:remote --project_id=internal-200822
build:remote --remote_cache=remotebuildexecution.googleapis.com
build:remote --remote_executor=remotebuildexecution.googleapis.com

##################################
# Saucelabs tests settings       #
# Turn on these settings with    #
# --config=saucelabs             #
##################################

# For saucelabs tests we don't want to enable flaky test attempts. Karma has its own integrated
# retry mechanism and we do not want to retry unnecessarily if Karma already tried multiple times.
test:saucelabs --flaky_test_attempts=1

####################################################
# User bazel configuration
# NOTE: This needs to be the *last* entry in the config.
####################################################

# Load any settings which are specific to the current user. Needs to be the *last* statement
# in this config, as the user configuration should be able to overwrite flags from this file.
try-import .bazelrc.user
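For orientation (not part of the deleted file): the `--config` groups defined above are opted into on the command line. The sketch below is a hedged example of typical invocations; the target labels are illustrative placeholders, while the config names and the `yarn bazel` wrapper come from the file itself.

```sh
# Sketch only: config names are defined in the .bazelrc above; the //packages/... labels are placeholders.
yarn bazel test --config=debug //packages/core/test/...           # attach a debugger to a test
yarn bazel build --config=ivy //packages/...                      # build with angular_ivy_enabled=True
yarn bazel build --config=release --config=remote //packages/...  # stamped build on remote execution
```

Anything placed in a personal `.bazelrc.user` file is picked up by the final `try-import` and can override these defaults locally.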
@@ -1,3 +0,0 @@
3.2.0
# [NB: this comment has to be after the first line, see https://github.com/bazelbuild/bazelisk/issues/117]
# When updating the Bazel version you also need to update the RBE toolchains version in package.bzl
@@ -1,19 +0,0 @@
# Encryption

Based on https://github.com/circleci/encrypted-files

In the CircleCI web UI, we have a secret variable called `KEY`
https://circleci.com/gh/angular/angular/edit#env-vars
which is only exposed to non-fork builds
(see "Pass secrets to builds from forked pull requests" under
https://circleci.com/gh/angular/angular/edit#advanced-settings)

We use this as a symmetric AES encryption key to encrypt tokens like
a GitHub token that enables publishing snapshots.

To create the github_token file, we take this approach:
- Find the angular-builds:token in the internal pw database
- Go inside the CircleCI default docker image so you use the same version of openssl as we will at runtime: `docker run --rm -it circleci/node:10.12`
- echo "https://[token]:@github.com" > credentials
- openssl aes-256-cbc -e -in credentials -out .circleci/github_token -k $KEY
- If needed, base64-encode the result so you can copy-paste it out of docker: `base64 github_token`
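The decryption direction is not spelled out above; the snippet below is a hedged sketch of the inverse step, mirroring the encryption flags and assuming it runs inside the same `circleci/node:10.12` image so the openssl defaults match. The output path `credentials` is illustrative.

```sh
# Sketch only: decrypt the token that was encrypted with `openssl aes-256-cbc -e ... -k $KEY`.
# $KEY is the CircleCI secret variable described above.
openssl aes-256-cbc -d -in .circleci/github_token -out credentials -k "$KEY"
# If the encrypted file was transported base64-encoded, decode it first with `base64 -d`.
```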
@@ -1,15 +0,0 @@
# Settings in this file should be OS agnostic. Use the bazel.<OS>.rc files for OS specific settings.

# Don't be spammy in the logs
build --noshow_progress

# Print all the options that apply to the build.
# This helps us diagnose which options override others
# (e.g. /etc/bazel.bazelrc vs. tools/bazel.rc)
build --announce_rc

# Retry in the event of flakes, eg. https://circleci.com/gh/angular/angular/31309
test --flaky_test_attempts=2

# More details on failures
build --verbose_failures=true
@@ -1,30 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to /etc/bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=/home/circleci/bazel_repository_cache

# Workaround https://github.com/bazelbuild/bazel/issues/3645
# Bazel doesn't calculate the memory ceiling correctly when running under Docker.
# Limit Bazel to consuming resources that fit in CircleCI "xlarge" class
# https://circleci.com/docs/2.0/configuration-reference/#resource_class
build --local_cpu_resources=8
build --local_ram_resources=14336

# All builds executed remotely should be done using our RBE configuration.
build:remote --google_default_credentials

# Upload to GCP's Build Status viewer to allow us to have better viewing of execution/build
# logs. This is only done on CI as the BES (GCP's Build Status viewer) API requires credentials
# from service accounts, rather than end user accounts.
build:remote --bes_backend=buildeventservice.googleapis.com
build:remote --bes_timeout=30s
build:remote --bes_results_url="https://source.cloud.google.com/results/invocations/"

build --config=remote
@@ -1,21 +0,0 @@
# These options are enabled when running on CI
# We do this by copying this file to $env:ProgramData\bazel.bazelrc at the start of the build.
# See documentation in /docs/BAZEL.md

# Import config items common to both Linux and Windows setups.
# https://docs.bazel.build/versions/master/guide.html#bazelrc-syntax-and-semantics
try-import %workspace%/.circleci/bazel.common.rc

# Save downloaded repositories in a location that can be cached by CircleCI. This helps us
# speed up the analysis time significantly with Bazel managed node dependencies on the CI.
build --repository_cache=C:/Users/circleci/bazel_repository_cache

# Manually set the local resources used in windows CI runs
build --local_ram_resources=13500
build --local_cpu_resources=4

# All windows jobs run on master and should use http caching
build --remote_http_cache=https://storage.googleapis.com/angular-team-cache
build --remote_accept_cached=true
build --remote_upload_local_results=true
build --google_default_credentials
@@ -7,400 +7,71 @@
# To validate changes, use an online parser, eg.
# http://yaml-online-parser.appspot.com/

# CircleCI configuration version
# Version 2.1 allows for extra config reuse features
# https://circleci.com/docs/2.0/reusing-config/#getting-started-with-config-reuse
version: 2.1
# Variables

# We don't want to include the current branch name in the cache key because that would prevent
# PRs from being able to restore the cache since the branch names are always different for PRs.
# The cache key should only consist of dynamic values that change whenever something in the
# cache changes. For example:
# 1) yarn lock file changes --> cached "node_modules" are different.
# 2) bazel repository definitions change --> cached bazel repositories are different.
# Windows needs its own cache key because binaries in node_modules are different.
# **NOTE 1**: If you change the cache key prefix, also sync the cache_key_fallback to match.
# **NOTE 2**: Keep the static part of the cache key as prefix to enable correct fallbacks.
# See https://circleci.com/docs/2.0/caching/#restoring-cache for how prefixes work in CircleCI.
var_3: &cache_key v7-angular-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
# We invalidate the cache if the Bazel version changes because otherwise the `bazelisk` cache
# folder will contain all previously used versions and ultimately cause the cache restoring to
# be slower due to its growing size.
var_4: &cache_key_fallback v7-angular-node-12-{{ checksum ".bazelversion" }}
var_3_win: &cache_key_win v7-angular-win-node-12-{{ checksum ".bazelversion" }}-{{ checksum "yarn.lock" }}-{{ checksum "WORKSPACE" }}-{{ checksum "packages/bazel/package.bzl" }}-{{ checksum "aio/yarn.lock" }}
var_4_win: &cache_key_win_fallback v7-angular-win-node-12-{{ checksum ".bazelversion" }}
## IMPORTANT
# If you change the `docker_image` version, also change the `cache_key` suffix and the version of
# `com_github_bazelbuild_buildtools` in the `/WORKSPACE` file.
var_1: &docker_image angular/ngcontainer:0.1.0
var_2: &cache_key angular-{{ .Branch }}-{{ checksum "yarn.lock" }}-0.1.0

# Workspace initially persisted by the `setup` job, and then enhanced by `build-npm-packages` and
# `build-ivy-npm-packages`.
# https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
# https://circleci.com/blog/deep-diving-into-circleci-workspaces/
var_7: &workspace_location ~/
# Settings common to each job
anchor_1: &job_defaults
working_directory: ~/ng
docker:
- image: *docker_image

# Filter to run a job on builds for pull requests only.
var_8: &only_on_pull_requests
filters:
branches:
only:
- /pull\/\d+/
# After checkout, rebase on top of master.
# Similar to travis behavior, but not quite the same.
# See https://discuss.circleci.com/t/1662
anchor_2: &post_checkout
post: git pull --ff-only origin "refs/pull/${CIRCLE_PULL_REQUEST//*pull\//}/merge"

# Filter to skip a job on builds for pull requests.
var_9: &skip_on_pull_requests
filters:
branches:
ignore:
- /pull\/\d+/

# Filter to run a job on builds for the master branch only.
var_10: &only_on_master
filters:
branches:
only:
- master

# Executor Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-executors
# **NOTE 1**: Pin to exact images using an ID (SHA). See https://circleci.com/docs/2.0/circleci-images/#using-a-docker-image-id-to-pin-an-image-to-a-fixed-version.
# (Using the tag is not necessary when pinning by ID, but include it anyway for documentation purposes.)
# **NOTE 2**: If you change the version of the docker images, also change the `cache_key` suffix.
executors:
default-executor:
parameters:
resource_class:
type: string
default: medium
docker:
- image: circleci/node:12.14.1@sha256:f9de24fc0017059cc42ef7d07db060008af65a98b1f0cdd1ef3339213226bf6d
resource_class: << parameters.resource_class >>
working_directory: ~/ng

windows-executor:
working_directory: ~/ng
resource_class: windows.medium
# CircleCI windows VMs do have the GitBash shell available:
# https://github.com/CircleCI-Public/windows-preview-docs#shells
# But in this specific case we really should not use it because Bazel must not be run from
# GitBash. These issues discuss why:
# https://github.com/bazelbuild/bazel/issues/5751
# https://github.com/bazelbuild/bazel/issues/5724#issuecomment-410194038
# https://github.com/bazelbuild/bazel/issues/6339#issuecomment-441600879
shell: powershell.exe -ExecutionPolicy Bypass
machine:
# Windows preview image that includes the following:
# - Visual Studio 2019 build tools
# - Node 12
# - yarn 1.17
# - Python 3.7.4
image: windows-server-2019-vs2019:201908-02

# Command Definitions
# https://circleci.com/docs/2.0/reusing-config/#authoring-reusable-commands
commands:
custom_attach_workspace:
description: Attach workspace at a predefined location
steps:
- attach_workspace:
at: *workspace_location

# Install shared libs used by Chrome that is either provisioned by
# rules_webtesting or by puppeteer.
install_chrome_libs:
description: Install shared Chrome libs
steps:
- run:
name: Install shared Chrome libs
command: |
sudo apt-get update
# Install GTK+ graphical user interface (libgtk-3-0), advanced linux sound architecture (libasound2)
# and network security service libraries (libnss3) & X11 Screen Saver extension library (libxss1)
# which are dependencies of chrome & needed for karma & protractor headless chrome tests.
# This is a very small install which takes around 7s compared to using the full
# circleci/node:x.x.x-browsers image.
sudo apt-get -y install libgtk-3-0 libasound2 libnss3 libxss1

# Install java runtime which is required by some integration tests such as
# //integration:hello_world__closure_test, //integration:i18n_test and
# //integration:ng_elements_test to run the closure compiler
install_java:
description: Install java
steps:
- run:
name: Install java
command: |
sudo apt-get update
# Install java runtime
sudo apt-get install default-jre

# Initializes the CI environment by setting up common environment variables.
init_environment:
description: Initializing environment (setting up variables)
steps:
- run:
name: Set up environment
environment:
CIRCLE_GIT_BASE_REVISION: << pipeline.git.base_revision >>
CIRCLE_GIT_REVISION: << pipeline.git.revision >>
command: ./.circleci/env.sh
- run:
# Configure git as the CircleCI `checkout` command does.
# This is needed because we only checkout on the setup job.
# Add GitHub to known hosts
name: Configure git
command: |
mkdir -p ~/.ssh
echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' >> ~/.ssh/known_hosts
git config --global url."ssh://git@github.com".insteadOf "https://github.com" || true
git config --global gc.auto 0 || true

# Normally this would be an individual job instead of a command.
# But startup and setup time for each individual windows job are high enough to discourage
# many small jobs, so instead we use a command for setup unless the gain becomes significant.
setup_win:
description: Setup windows node environment
steps:
# Use the Linux workspace directly, as it already has the checkout (rebased) and node modules.
- custom_attach_workspace
# Install Bazel pre-requisites that aren't in the preconfigured CircleCI Windows VM.
- run: ./.circleci/windows-env.ps1
- run: node --version
- run: yarn --version
- restore_cache:
keys:
- *cache_key_win
- *cache_key_win_fallback
# Reinstall to get windows binaries.
- run: yarn install --frozen-lockfile --non-interactive
# Install @bazel/bazelisk globally and use that for the first run.
# Workaround for https://github.com/bazelbuild/rules_nodejs/issues/894
# NB: the issue was for @bazel/bazel but the same problem applies to @bazel/bazelisk
- run: yarn global add @bazel/bazelisk@$env:BAZELISK_VERSION
- run: bazelisk info

notify_webhook_on_fail:
description: Notify a webhook about failure
parameters:
# `webhook_url_env_var` are secret env vars defined in CircleCI project settings.
# The URLs come from https://angular-team.slack.com/apps/A0F7VRE7N-circleci.
webhook_url_env_var:
type: env_var_name
steps:
- run:
when: on_fail
command: |
notificationJson="{\"text\":\":x: \`$CIRCLE_JOB\` job for $CIRCLE_BRANCH branch failed on build $CIRCLE_BUILD_NUM: $CIRCLE_BUILD_URL :scream:\"}"
curl --request POST --header "Content-Type: application/json" --data "$notificationJson" ${<< parameters.webhook_url_env_var >>}

# Job definitions
# Jobs can include parameters that are passed in the workflow job invocation.
# https://circleci.com/docs/2.0/reusing-config/#authoring-parameterized-jobs
version: 2
jobs:
setup:
executor: default-executor
steps:
- checkout
- init_environment
- run:
name: Rebase PR on target branch
# After checkout, rebase on top of target branch.
command: >
if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
# User is required for rebase.
git config user.name "angular-ci"
git config user.email "angular-ci"
# Rebase PR on top of target branch.
node tools/rebase-pr.js
else
echo "This build is not over a PR, nothing to do."
fi
# This cache is saved in the build-npm-packages job so that the Bazel cache is also included.
- restore_cache:
keys:
- *cache_key
- *cache_key_fallback
- run:
name: Running Yarn install
command: yarn install --frozen-lockfile --non-interactive
# Yarn's requests sometimes take more than 10mins to complete.
no_output_timeout: 45m
- run: yarn --cwd aio install --frozen-lockfile --non-interactive
# Make the bazel directories and add a file to them if they don't exist already so that
# persist_to_workspace does not fail.
- run: |
if [ ! -d ~/bazel_repository_cache ]; then
mkdir ~/bazel_repository_cache
touch ~/bazel_repository_cache/MARKER
fi
# Persist any changes at this point to be reused by further jobs.
# **NOTE**: To add new content to the workspace, always persist on the same root.
- persist_to_workspace:
root: *workspace_location
paths:
- ./ng
- ./bazel_repository_cache

lint:
executor: default-executor
<<: *job_defaults
steps:
- custom_attach_workspace
- init_environment
- checkout:
<<: *post_checkout
# Check BUILD.bazel formatting before we have a node_modules directory
# Then we don't need any exclude pattern to avoid checking those files
- run: 'buildifier -mode=check $(find . -type f \( -name BUILD.bazel -or -name BUILD \)) ||
(echo "BUILD files not formatted. Please run ''yarn buildifier''" ; exit 1)'
# Run the skylark linter to check our Bazel rules
- run: 'find . -type f -name "*.bzl" |
xargs java -jar /usr/local/bin/Skylint_deploy.jar ||
(echo -e "\n.bzl files have lint errors. Please run ''yarn skylint''"; exit 1)'
- restore_cache:
key: *cache_key

- run: yarn -s tslint
- run: yarn -s ng-dev format changed $CI_GIT_BASE_REVISION --check
- run: yarn -s ts-circular-deps:check
- run: yarn -s ng-dev commit-message validate-range --range $CI_COMMIT_RANGE
- run: yarn install --frozen-lockfile --non-interactive
- run: ./node_modules/.bin/gulp lint

test_aio:
executor: default-executor
build:
<<: *job_defaults
resource_class: large
steps:
- custom_attach_workspace
- init_environment
- install_chrome_libs
# Build aio
- run: yarn --cwd aio build --progress=false
# Lint the code
- run: yarn --cwd aio lint
# Run unit tests
- run: yarn --cwd aio test --progress=false --watch=false
# Run e2e tests
- run: yarn --cwd aio e2e --configuration=ci
# Run PWA-score tests
- run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE
# Run accessibility tests
- run: yarn --cwd aio test-a11y-score-localhost
- checkout:
<<: *post_checkout
- restore_cache:
key: *cache_key

deploy_aio:
executor: default-executor
steps:
- custom_attach_workspace
- init_environment
- install_chrome_libs
# Deploy angular.io to production (if necessary)
- run: setPublicVar_CI_STABLE_BRANCH
- run: yarn --cwd aio deploy-production
- run: bazel info release
- run: bazel run @yarn//:yarn
# Use bazel query so that we explicitly ask for all buildable targets to be built as well
# This avoids waiting for a build command to finish before running the first test
# See https://github.com/bazelbuild/bazel/issues/4257
- run: bazel query --output=label '//modules/... union //packages/... union //tools/...' | xargs bazel test --config=ci

test_aio_local:
parameters:
viewengine:
type: boolean
default: false
executor: default-executor
steps:
- custom_attach_workspace
- init_environment
- install_chrome_libs
# Build aio (with local Angular packages)
- run: yarn --cwd aio build-local<<# parameters.viewengine >>-with-viewengine<</ parameters.viewengine >>-ci
# Run unit tests
- run: yarn --cwd aio test --progress=false --watch=false
# Run e2e tests
- run: yarn --cwd aio e2e --configuration=ci
# Run PWA-score tests
- run: yarn --cwd aio test-pwa-score-localhost $CI_AIO_MIN_PWA_SCORE

test_aio_tools:
executor: default-executor
steps:
- custom_attach_workspace
- init_environment
# Install
- run: yarn --cwd aio install --frozen-lockfile --non-interactive
- run: yarn --cwd aio extract-cli-command-docs
# Run tools tests
- run: yarn --cwd aio tools-test
- run: ./aio/aio-builds-setup/scripts/test.sh

# This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
aio_preview:
executor: default-executor
environment:
AIO_SNAPSHOT_ARTIFACT_PATH: &aio_preview_artifact_path 'aio/tmp/snapshot.tgz'
steps:
- custom_attach_workspace
- init_environment
- run: ./aio/scripts/build-artifacts.sh $AIO_SNAPSHOT_ARTIFACT_PATH $CI_PULL_REQUEST $CI_COMMIT
- store_artifacts:
path: *aio_preview_artifact_path
# The `destination` needs to be kept in sync with the value of
# `AIO_ARTIFACT_PATH` in `aio/aio-builds-setup/Dockerfile`
destination: aio/dist/aio-snapshot.tgz
- run: node ./aio/scripts/create-preview $CIRCLE_BUILD_NUM

# This job should only be run on PR builds, where `CI_PULL_REQUEST` is not `false`.
test_aio_preview:
executor: default-executor
steps:
- custom_attach_workspace
- init_environment
- install_chrome_libs
- run: yarn --cwd aio install --frozen-lockfile --non-interactive
- run:
name: Wait for preview and run tests
command: node aio/scripts/test-preview.js $CI_PULL_REQUEST $CI_COMMIT $CI_AIO_MIN_PWA_SCORE

# The `build-npm-packages` tasks exist for backwards-compatibility with old scripts and
# tests that rely on the pre-Bazel `dist/packages-dist` output structure (build.sh).
# Having multiple jobs that independently build in this manner duplicates some work; we build
# the bazel packages more than once. Even though we have a remote cache, these jobs will
# typically run in parallel so up-to-date outputs will not be available at the time the build
# starts.

# Build the view engine npm packages. No new jobs should depend on this.
build-npm-packages:
executor:
name: default-executor
resource_class: medium
steps:
- custom_attach_workspace
- init_environment
- run: node scripts/build/build-packages-dist.js

# Save the npm packages from //packages/... for other workflow jobs to read
- persist_to_workspace:
root: *workspace_location
paths:
- ng/dist/packages-dist
- ng/dist/zone.js-dist

# Save dependencies and bazel repository cache to use on subsequent runs.
- save_cache:
key: *cache_key
paths:
- "node_modules"
- "aio/node_modules"
- "~/bazel_repository_cache"
- "~/.cache/bazelisk"


workflows:
version: 2
default_workflow:
jobs:
- setup:
filters:
branches:
ignore: g3
- lint:
requires:
- setup
- build-npm-packages:
requires:
- setup
- test_aio:
requires:
- setup
- deploy_aio:
requires:
- test_aio
- test_aio_local:
requires:
- build-npm-packages
- test_aio_tools:
requires:
- build-npm-packages
- aio_preview:
# Only run on PR builds. (There can be no previews for non-PR builds.)
<<: *only_on_pull_requests
requires:
- setup
- test_aio_preview:
requires:
- aio_preview
- lint
- build
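A side note on the cache keys defined near the top of this config: a key changes only when one of its checksummed inputs changes. The one-liner below is a hedged illustration of which files participate; CircleCI's `{{ checksum ... }}` templating does its own hashing, so this does not reproduce the exact key values.

```sh
# Sketch only: the files whose contents feed the v7-angular-node-12-... and v7-angular-win-node-12-... keys.
sha1sum .bazelversion yarn.lock WORKSPACE packages/bazel/package.bzl aio/yarn.lock
```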
@@ -1,73 +0,0 @@
####################################################################################################
# Helpers for defining environment variables for CircleCI.
#
# In CircleCI, each step runs in a new shell. The way to share ENV variables across steps is to
# export them from `$BASH_ENV`, which is automatically sourced at the beginning of every step (for
# the default `bash` shell).
#
# See also https://circleci.com/docs/2.0/env-vars/#using-bash_env-to-set-environment-variables.
####################################################################################################

# Set and print an environment variable.
#
# Use this function for setting environment variables that are public, i.e. it is OK for them to be
# visible to anyone through the CI logs.
#
# Usage: `setPublicVar <name> <value>`
function setPublicVar() {
  setSecretVar $1 "$2";
  echo "$1=$2";
}

# Set (without printing) an environment variable.
#
# Use this function for setting environment variables that are secret, i.e. should not be visible to
# everyone through the CI logs.
#
# Usage: `setSecretVar <name> <value>`
function setSecretVar() {
  # WARNING: Secrets (e.g. passwords, access tokens) should NOT be printed.
  # (Keep original shell options to restore at the end.)
  local -r originalShellOptions=$(set +o);
  set +x -eu -o pipefail;

  echo "export $1=\"${2:-}\";" >> $BASH_ENV;

  # Restore original shell options.
  eval "$originalShellOptions";
}


# Create a function to set an environment variable, when called.
#
# Use this function for creating setters for public environment variables that require expensive or
# time-consuming computations and may not be needed. When needed, you can call this function to set
# the environment variable (which will be available through `$BASH_ENV` from that point onwards).
#
# Arguments:
# - `<name>`: The name of the environment variable. The generated setter function will be
#   `setPublicVar_<name>`.
# - `<code>`: The code to run to compute the value for the variable. Since this code should be
#   executed lazily, it must be properly escaped. For example:
#   ```sh
#   # DO NOT do this:
#   createPublicVarSetter MY_VAR "$(whoami)";  # `whoami` will be evaluated eagerly
#
#   # DO this instead:
#   createPublicVarSetter MY_VAR "\$(whoami)";  # `whoami` will NOT be evaluated eagerly
#   ```
#
# Usage: `createPublicVarSetter <name> <code>`
#
# Example:
# ```sh
# createPublicVarSetter MY_VAR 'echo "FOO"';
# echo $MY_VAR;  # Not defined
#
# setPublicVar_MY_VAR;
# source $BASH_ENV;
# echo $MY_VAR;  # FOO
# ```
function createPublicVarSetter() {
  echo "setPublicVar_$1() { setPublicVar $1 \"$2\"; }" >> $BASH_ENV;
}
.circleci/env.sh
@@ -1,113 +0,0 @@
#!/usr/bin/env bash

# Variables
readonly projectDir=$(realpath "$(dirname ${BASH_SOURCE[0]})/..")
readonly envHelpersPath="$projectDir/.circleci/env-helpers.inc.sh";
readonly bashEnvCachePath="$projectDir/.circleci/bash_env_cache";

# Load helpers and make them available everywhere (through `$BASH_ENV`).
source $envHelpersPath;
echo "source $envHelpersPath;" >> $BASH_ENV;


####################################################################################################
# Define PUBLIC environment variables for CircleCI.
####################################################################################################
# See https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables for more info.
####################################################################################################
setPublicVar CI "$CI"
setPublicVar PROJECT_ROOT "$projectDir";
setPublicVar CI_AIO_MIN_PWA_SCORE "62";
# This is the branch being built; e.g. `pull/12345` for PR builds.
setPublicVar CI_BRANCH "$CIRCLE_BRANCH";
setPublicVar CI_BUILD_URL "$CIRCLE_BUILD_URL";
setPublicVar CI_COMMIT "$CIRCLE_SHA1";
# `CI_COMMIT_RANGE` is only used on push builds (a.k.a. non-PR, non-scheduled builds and rerun
# workflows of such builds).
setPublicVar CI_GIT_BASE_REVISION "${CIRCLE_GIT_BASE_REVISION}";
setPublicVar CI_GIT_REVISION "${CIRCLE_GIT_REVISION}";
setPublicVar CI_COMMIT_RANGE "$CIRCLE_GIT_BASE_REVISION..$CIRCLE_GIT_REVISION";
setPublicVar CI_PULL_REQUEST "${CIRCLE_PR_NUMBER:-false}";
setPublicVar CI_REPO_NAME "$CIRCLE_PROJECT_REPONAME";
setPublicVar CI_REPO_OWNER "$CIRCLE_PROJECT_USERNAME";
setPublicVar CI_PR_REPONAME "$CIRCLE_PR_REPONAME";
setPublicVar CI_PR_USERNAME "$CIRCLE_PR_USERNAME";


####################################################################################################
# Define "lazy" PUBLIC environment variables for CircleCI.
# (I.e. functions to set an environment variable when called.)
####################################################################################################
createPublicVarSetter CI_STABLE_BRANCH "\$(npm info @angular/core dist-tags.latest | sed -r 's/^\\s*([0-9]+\\.[0-9]+)\\.[0-9]+.*$/\\1.x/')";


####################################################################################################
# Define SECRET environment variables for CircleCI.
####################################################################################################
setSecretVar CI_SECRET_AIO_DEPLOY_FIREBASE_TOKEN "$AIO_DEPLOY_TOKEN";
setSecretVar CI_SECRET_PAYLOAD_FIREBASE_TOKEN "$ANGULAR_PAYLOAD_TOKEN";


####################################################################################################
# Define SauceLabs environment variables for CircleCI.
####################################################################################################
setPublicVar SAUCE_USERNAME "angular-framework";
setSecretVar SAUCE_ACCESS_KEY "0c731274ed5f-cbc9-16f4-021a-9835e39f";
# TODO(josephperrott): Remove environment variables once all saucelabs tests are via bazel method.
setPublicVar SAUCE_LOG_FILE /tmp/angular/sauce-connect.log
setPublicVar SAUCE_READY_FILE /tmp/angular/sauce-connect-ready-file.lock
setPublicVar SAUCE_PID_FILE /tmp/angular/sauce-connect-pid-file.lock
setPublicVar SAUCE_TUNNEL_IDENTIFIER "angular-framework-${CIRCLE_BUILD_NUM}-${CIRCLE_NODE_INDEX}"
# Amount of seconds we wait for sauceconnect to establish a tunnel instance. In order to not
# acquire CircleCI instances for too long if sauceconnect failed, we need a connect timeout.
setPublicVar SAUCE_READY_FILE_TIMEOUT 120


####################################################################################################
# Define environment variables for the `angular/components` repo unit tests job.
####################################################################################################
# We specifically use a directory within "/tmp" here because we want the cloned repo to be
# completely isolated from angular/angular in order to avoid any bad interactions between
# their separate build setups. **NOTE**: When updating the temporary directory, also update
# the `save_cache` path configuration in `config.yml`
setPublicVar COMPONENTS_REPO_TMP_DIR "/tmp/angular-components-repo"
setPublicVar COMPONENTS_REPO_URL "https://github.com/angular/components.git"
setPublicVar COMPONENTS_REPO_BRANCH "master"
# **NOTE**: When updating the commit SHA, also update the cache key in the CircleCI `config.yml`.
setPublicVar COMPONENTS_REPO_COMMIT "09e68db8ed5b1253f2fe38ff954ef0df019fc25a"


####################################################################################################
# Decrypt GCP Credentials and store them as the Google default credentials.
####################################################################################################
mkdir -p "$HOME/.config/gcloud";
openssl aes-256-cbc -d -in "${projectDir}/.circleci/gcp_token" \
  -md md5 -k "$CIRCLE_PROJECT_REPONAME" -out "$HOME/.config/gcloud/application_default_credentials.json"
####################################################################################################
# Set bazel configuration for CircleCI runs.
####################################################################################################
cp "${projectDir}/.circleci/bazel.linux.rc" "$HOME/.bazelrc";

####################################################################################################
# Create shell script in /tmp for Bazel actions to access CI envs without
# busting the cache. Used by payload-size.sh script in integration tests.
####################################################################################################
readonly bazelVarEnv="/tmp/bazel-ci-env.sh"
echo "# Setup by /.circleci/env.sh" > $bazelVarEnv
echo "export PROJECT_ROOT=\"${PROJECT_ROOT}\";" >> $bazelVarEnv
echo "export CI_BRANCH=\"${CI_BRANCH}\";" >> $bazelVarEnv
echo "export CI_BUILD_URL=\"${CI_BUILD_URL}\";" >> $bazelVarEnv
echo "export CI_COMMIT=\"${CI_COMMIT}\";" >> $bazelVarEnv
echo "export CI_COMMIT_RANGE=\"${CI_COMMIT_RANGE}\";" >> $bazelVarEnv
echo "export CI_PULL_REQUEST=\"${CI_PULL_REQUEST}\";" >> $bazelVarEnv
echo "export CI_REPO_NAME=\"${CI_REPO_NAME}\";" >> $bazelVarEnv
echo "export CI_REPO_OWNER=\"${CI_REPO_OWNER}\";" >> $bazelVarEnv
echo "export CI_SECRET_PAYLOAD_FIREBASE_TOKEN=\"${CI_SECRET_PAYLOAD_FIREBASE_TOKEN}\";" >> $bazelVarEnv

####################################################################################################
####################################################################################################
## Source `$BASH_ENV` to make the variables available immediately. ##
## ***NOTE: This must remain the last action in this script***     ##
####################################################################################################
####################################################################################################
source $BASH_ENV;
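To make the hand-off above concrete: a later script (for example the `payload-size.sh` helper mentioned in the comment) sources `/tmp/bazel-ci-env.sh` rather than `$BASH_ENV`, so Bazel actions can read the CI variables without busting the cache. A minimal, hypothetical consumer:

```sh
#!/usr/bin/env bash
# Hedged sketch of a downstream consumer; the echo line is illustrative only.
set -eu
source /tmp/bazel-ci-env.sh
echo "Reporting payload sizes for ${CI_REPO_OWNER}/${CI_REPO_NAME}@${CI_COMMIT} (${CI_BUILD_URL})"
```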
Binary file not shown.
Binary file not shown.
@@ -1,11 +0,0 @@
#!/bin/sh
# Install bazel remote cache proxy
# This is temporary until the feature is no longer experimental on CircleCI.
# See remote cache documentation in /docs/BAZEL.md

set -u -e

readonly DOWNLOAD_URL="https://5-116431813-gh.circle-artifacts.com/0/pkg/bazel-remote-proxy-$(uname -s)_$(uname -m)"

curl --fail -o ~/bazel-remote-proxy "$DOWNLOAD_URL"
chmod +x ~/bazel-remote-proxy
@@ -1,105 +0,0 @@
#!/usr/bin/env node

/**
 * Usage (cli):
 * ```
 * node create-preview <build-number> <job-name> <webhook-url>
 * ```
 *
 * Usage (JS):
 * ```js
 * require('./trigger-webhook').
 *   triggerWebhook(buildNumber, jobName, webhookUrl).
 *   then(...);
 * ```
 *
 * Triggers a notification webhook with CircleCI specific info.
 *
 * It can be used for notifying external servers and triggering operations based on CircleCI job status
 * (e.g. triggering the creation of a preview based on previously stored build artifacts).
 *
 * The body of the sent payload is of the form:
 * ```json
 * {
 *   "payload": {
 *     "build_num": ${buildNumber}
 *     "build_parameters": {
 *       "CIRCLE_JOB": "${jobName}"
 *     }
 *   }
 * }
 * ```
 *
 * When used from JS, it returns a promise which resolves to an object of the form:
 * ```json
 * {
 *   "statusCode": ${statusCode},
 *   "responseText": "${responseText}"
 * }
 * ```
 *
 * NOTE:
 * - When used from the cli, the command will exit with an error code if the response's status code
 *   is outside the [200, 400) range.
 * - When used from JS, the returned promise will be resolved, even if the response's status code is
 *   outside the [200, 400) range. It is up to the caller to decide how this should be handled.
 */

// Imports
const {request} = require('https');

// Exports
module.exports = {
  triggerWebhook,
};

// Run
if (require.main === module) {
  _main(process.argv.slice(2));
}

// Helpers
function _main(args) {
  triggerWebhook(...args)
      .then(
          ({statusCode, responseText}) => (200 <= statusCode && statusCode < 400) ?
              console.log(`Status: ${statusCode}\n${responseText}`) :
              Promise.reject(new Error(`Request failed (status: ${statusCode}): ${responseText}`)))
      .catch(err => {
        console.error(err);
        process.exit(1);
      });
}

function postJson(url, data) {
  return new Promise((resolve, reject) => {
    const opts = {method: 'post', headers: {'Content-Type': 'application/json'}};
    const onResponse = res => {
      const statusCode = res.statusCode || -1;
      let responseText = '';

      res.on('error', reject)
          .on('data', d => responseText += d)
          .on('end', () => resolve({statusCode, responseText}));
    };

    request(url, opts, onResponse).on('error', reject).end(JSON.stringify(data));
  });
}

async function triggerWebhook(buildNumber, jobName, webhookUrl) {
  if (!buildNumber || !jobName || !webhookUrl || isNaN(buildNumber)) {
    throw new Error(
        'Missing or invalid arguments.\n' +
        'Expected: buildNumber (number), jobName (string), webhookUrl (string)');
  }

  const data = {
    payload: {
      build_num: +buildNumber,
      build_parameters: {CIRCLE_JOB: jobName},
    },
  };

  return postJson(webhookUrl, data);
}
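A hedged invocation sketch for the script above, assuming it is saved as `.circleci/trigger-webhook.js` (matching the `require('./trigger-webhook')` usage) and that `$PREVIEW_WEBHOOK_URL` is a placeholder for whichever webhook URL is configured; `$CIRCLE_BUILD_NUM` is the CircleCI built-in used elsewhere in this diff.

```sh
# Sketch only: the path and the webhook variable are assumptions; arguments follow the "Usage (cli)" block.
node .circleci/trigger-webhook.js "$CIRCLE_BUILD_NUM" aio_preview "$PREVIEW_WEBHOOK_URL"
```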
@@ -1,56 +0,0 @@
# Install Bazel pre-reqs on Windows
# https://docs.bazel.build/versions/master/install-windows.html
# https://docs.bazel.build/versions/master/windows.html
# Install MSYS2 and packages
choco install msys2 --version 20180531.0.0 --no-progress --package-parameters "/NoUpdate"
C:\tools\msys64\usr\bin\bash.exe -l -c "pacman --needed --noconfirm -S zip unzip patch diffutils git"

# Add PATH modifications to the Powershell profile. This is the win equivalent of .bash_profile.
# https://docs.microsoft.com/en-us/previous-versions//bb613488(v=vs.85)
new-item -path $profile -itemtype file -force
# Paths for nodejs, npm, yarn, and msys2. Use single quotes to prevent interpolation.
# Add before the original path to use msys2 instead of the installed gitbash.
Add-Content $profile '$Env:path = "${Env:ProgramFiles}\nodejs\;C:\Users\circleci\AppData\Roaming\npm\;${Env:ProgramFiles(x86)}\Yarn\bin\;C:\Users\circleci\AppData\Local\Yarn\bin\;C:\tools\msys64\usr\bin\;" + $Env:path'
# Environment variables for Bazel
Add-Content $profile '$Env:BAZEL_SH = "C:\tools\msys64\usr\bin\bash.exe"'

# Get the bazelisk version devdep and store it in a global var for use in the circleci job.
$bazeliskVersion = & ${Env:ProgramFiles}\nodejs\node.exe -e "console.log(require('./package.json').devDependencies['@bazel/bazelisk'])"
# This is a tricky situation: we want $bazeliskVersion to be evaluated but not $Env:BAZELISK_VERSION.
# Formatting works https://stackoverflow.com/questions/32127583/expand-variable-inside-single-quotes
$bazeliskVersionGlobalVar = '$Env:BAZELISK_VERSION = "{0}"' -f $bazeliskVersion
Add-Content $profile $bazeliskVersionGlobalVar

# Remove the CircleCI checkout SSH override, because it breaks cloning repositories through Bazel.
# See https://circleci.com/gh/angular/angular/401454 for an example.
# TODO: is this really needed? Maybe there's a better way. It doesn't happen on Linux or on Codefresh.
git config --global --unset url.ssh://git@github.com.insteadOf


####################################################################################################
# Decrypt GCP Credentials and store them as the Google default credentials.
####################################################################################################
mkdir ${env:APPDATA}\gcloud
openssl aes-256-cbc -d -in .circleci\gcp_token -md md5 -out "$env:APPDATA\gcloud\application_default_credentials.json" -k "$env:CIRCLE_PROJECT_REPONAME"

####################################################################################################
# Set bazel configuration for CircleCI runs.
####################################################################################################
copy .circleci\bazel.windows.rc ${Env:USERPROFILE}\.bazelrc

####################################################################################################
# Install specific version of node.
####################################################################################################
choco install nodejs --version 12.14.1 --no-progress

# These Bazel prereqs aren't needed because the CircleCI image already includes them.
# choco install yarn --version 1.16.0 --no-progress
# choco install vcredist2015 --version 14.0.24215.20170201

# We don't need VS Build Tools for the tested bazel targets.
# If it's needed again, uncomment these lines.
# VS Build Tools are needed for Bazel C++ targets (like com_google_protobuf)
# choco install visualstudio2019buildtools --version 16.1.2.0 --no-progress --package-parameters "--add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --add Microsoft.Component.VC.Runtime.UCRTSDK --add Microsoft.VisualStudio.Component.Windows10SDK.17763"
# Add-Content $profile '$Env:BAZEL_VC = "${Env:ProgramFiles(x86)}\Microsoft Visual Studio\2019\BuildTools\VC\"'
# Python is needed for Bazel Python targets
# choco install python --version 3.5.1 --no-progress
@ -1,31 +0,0 @@
# VSCode Remote Development - Developing inside a Container

This folder contains configuration files that can be used to opt into working on this repository in a [Docker container](https://www.docker.com/resources/what-container) via [VSCode](https://code.visualstudio.com/)'s Remote Development feature (see below).

Info on remote development and developing inside a container with VSCode:
- [VSCode: Remote Development](https://code.visualstudio.com/docs/remote/remote-overview)
- [VSCode: Developing inside a Container](https://code.visualstudio.com/docs/remote/containers)
- [VSCode: Remote Development FAQ](https://code.visualstudio.com/docs/remote/faq)


## Usage

_Prerequisite: [Install Docker](https://docs.docker.com/install) on your local environment._

To get started, read and follow the instructions in [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The [.devcontainer/](.) directory contains pre-configured `devcontainer.json` and `Dockerfile` files, which you can use to set up remote development with a Docker container.

In a nutshell, you need to (see the sketch after this list):
- Install the [Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension.
- Copy [recommended-Dockerfile](./recommended-Dockerfile) to `Dockerfile` (and optionally tweak it to suit your needs).
- Copy [recommended-devcontainer.json](./recommended-devcontainer.json) to `devcontainer.json` (and optionally tweak it to suit your needs).
- Open VSCode and bring up the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).
- Type `Remote-Containers: Open Folder in Container` and choose your local clone of [angular/angular](https://github.com/angular/angular).

The `.devcontainer/devcontainer.json` and `.devcontainer/Dockerfile` files are ignored by git, so you can have your own local versions. We may occasionally update the template files ([recommended-devcontainer.json](./recommended-devcontainer.json), [recommended-Dockerfile](./recommended-Dockerfile)), in which case you will need to manually update your local copies (if desired).


## Updating `recommended-devcontainer.json` and `recommended-Dockerfile`

You can update and commit the recommended config files (which people use as the basis for their local configs) if you find that something is broken, out-of-date, or can be improved.

Please keep in mind that any changes you make will potentially be used by many people on different environments. Try to keep these config files cross-platform compatible and free of personal preferences.
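The list above boils down to a couple of file copies before opening the folder in VSCode. A minimal sketch, assuming you are at the repository root and already have Docker and the Remote - Containers extension installed:

```shell
# Create your local (git-ignored) copies of the recommended config files.
cp .devcontainer/recommended-Dockerfile .devcontainer/Dockerfile
cp .devcontainer/recommended-devcontainer.json .devcontainer/devcontainer.json

# Optionally tweak the copies, then open the repo in VSCode and run
# "Remote-Containers: Open Folder in Container" from the Command Palette.
code .
```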
@ -1,24 +0,0 @@
# Image metadata and config.
FROM circleci/node:10-browsers  # Ideally, the image version should be what we use on CI.
                                # See `executors > browsers-executor` in `.circleci/config.yml`.

LABEL name="Angular dev environment" \
      description="This image can be used to create a dev environment for building Angular." \
      vendor="angular" \
      version="1.0"

EXPOSE 4000 4200 4433 5000 8080 9876


# Switch to `root` (CircleCI images use `circleci` as the user).
USER root


# Configure `Node.js`/`npm` and install utilities.
RUN npm config --global set user root
RUN npm install --global yarn@latest  # Ideally, the version should be what we use on CI.
                                      # See `commands > overwrite_yarn` in `.circleci/config.yml`.


# Go! (And keep going.)
CMD ["tail", "--follow", "/dev/null"]
@ -1,16 +0,0 @@
// Reference: https://code.visualstudio.com/docs/remote/containers#_devcontainerjson-reference
{
  "name": "Angular dev container",
  "dockerFile": "Dockerfile",
  "appPort": [4000, 4200, 4433, 5000, 8080, 9876],
  "postCreateCommand": "yarn install",
  "extensions": [
    "devondcarew.bazel-code",
    "gkalpak.aio-docs-utils",
    "ms-vscode.vscode-typescript-tslint-plugin",
    "xaver.clang-format",
    // The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
    //"angular.ng-template",
    //"dbaeumer.vscode-eslint",
  ],
}
@ -1,4 +1,4 @@
# https://editorconfig.org
# http://editorconfig.org

root = true

3 .gitattributes vendored
@ -5,8 +5,5 @@
*.js eol=lf
*.ts eol=lf

# API guardian patch must always use LF for tests to work
*.patch eol=lf

# Must keep Windows line ending to be parsed correctly
scripts/windows/packages.txt eol=crlf
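To sanity-check which end-of-line attribute applies to a given path, you can query `git check-attr` directly. A minimal sketch, run from the repository root (the first path is only an illustrative example; the expected output follows from the attributes above):

```shell
# Ask git which `eol` attribute each path resolves to.
git check-attr eol -- packages/core/index.ts scripts/windows/packages.txt
# Expected (roughly):
#   packages/core/index.ts: eol: lf
#   scripts/windows/packages.txt: eol: crlf
```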
59 .github/ISSUE_TEMPLATE.md vendored
@ -1,10 +1,57 @@
🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑
<!--
PLEASE HELP US PROCESS GITHUB ISSUES FASTER BY PROVIDING THE FOLLOWING INFORMATION.

Please help us process issues more efficiently by filing an
issue using one of the following templates:
ISSUES MISSING IMPORTANT INFORMATION MAY BE CLOSED WITHOUT INVESTIGATION.
-->

https://github.com/angular/angular/issues/new/choose
## I'm submitting a...
<!-- Check one of the following options with "x" -->
<pre><code>
[ ] Regression (a behavior that used to work and stopped working in a new release)
[ ] Bug report <!-- Please search GitHub for a similar issue or PR before submitting -->
[ ] Feature request
[ ] Documentation issue or request
[ ] Support request => Please do not submit support request here, instead see https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question
</code></pre>

Thank you!
## Current behavior
<!-- Describe how the issue manifests. -->

🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑🛑

## Expected behavior
<!-- Describe what the desired behavior would be. -->


## Minimal reproduction of the problem with instructions
<!--
For bug reports please provide the *STEPS TO REPRODUCE* and if possible a *MINIMAL DEMO* of the problem via
https://plnkr.co or similar (you can use this template as a starting point: http://plnkr.co/edit/tpl:AvJOMERrnz94ekVua0u5).
-->

## What is the motivation / use case for changing the behavior?
<!-- Describe the motivation or the concrete use case. -->


## Environment

<pre><code>
Angular version: X.Y.Z
<!-- Check whether this is still an issue in the most recent Angular version -->

Browser:
- [ ] Chrome (desktop) version XX
- [ ] Chrome (Android) version XX
- [ ] Chrome (iOS) version XX
- [ ] Firefox version XX
- [ ] Safari (desktop) version XX
- [ ] Safari (iOS) version XX
- [ ] IE version XX
- [ ] Edge version XX

For Tooling issues:
- Node version: XX <!-- run `node --version` -->
- Platform: <!-- Mac, Linux, Windows -->

Others:
<!-- Anything else relevant? Operating system version, IDE, package manager, HTTP server, ... -->
</code></pre>
22 .github/ISSUE_TEMPLATE/8-translate-docs.md vendored
@ -1,22 +0,0 @@
---
name: "📚Translate doc into Spanish"
about: Request to translate certain docs into Spanish

---

📚Translate: <!-- ✍️ edit: --> creating-libraries.md

<!--🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅

Translation of the official Angular documentation into Spanish

🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅🔅-->


## File name:
<!-- ✍️ edit: --> creating-libraries.md


## Path to the file within the Angular project

<!-- ✍️ edit: --> https://github.com/angular-hispano/angular/blob/master/aio/content/guide/creating-libraries.md
62 .github/PULL_REQUEST_TEMPLATE.md vendored
@ -1,35 +1,43 @@
## PR Checklist
Please check if your PR fulfills the following requirements:
## PR Checklist
Please check if your PR fulfills the following requirements:

- [ ] The commit message follows [our guidelines](https://github.com/angular-hispano/angular/blob/master/CONTRIBUTING.md#-formato-para-el-mensaje-de-los-commits)
- [ ] I tested the changes I added (bug fixes / features)
- [ ] I reviewed the translations or content changes beforehand
- [ ] I consulted the Spanish [glossary of terms](https://github.com/angular-hispano/angular/blob/master/aio/diccionario-de-términos.md)
- [ ] I created two files with the corresponding extension (.en.md for the English file and .md for the Spanish file)
- [ ] I linked the commit to the corresponding issue <!-- example: Fixes #X -->
- [ ] The commit message follows our guidelines: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit
- [ ] Tests for the changes have been added (for bug fixes / features)
- [ ] Docs have been added / updated (for bug fixes / features)


## PR Type
What kind of change does this PR introduce?
## PR Type
What kind of change does this PR introduce?

<!-- Check the options that apply with an "x". -->
<!-- Please check the one that applies to this PR using "x". -->
```
[ ] Bugfix
[ ] Feature
[ ] Code style update (formatting, local variables)
[ ] Refactoring (no functional changes, no api changes)
[ ] Build related changes
[ ] CI related changes
[ ] Documentation content changes
[ ] angular.io application / infrastructure changes
[ ] Other... Please describe:
```

- [ ] Bugfix
- [ ] Feature
- [ ] Code style update (formatting, local variables)
- [ ] Refactoring (no functional changes, no API changes)
- [ ] Build-related changes
- [ ] CI-related changes (continuous integration)
- [ ] Documentation content changes
- [ ] angular.io application / infrastructure changes
- [ ] Other... Please describe:
## What is the current behavior?
<!-- Please describe the current behavior that you are modifying, or link to a relevant issue. -->

## What is the current behavior?
<!-- Describe the current behavior that you are modifying, or link to a relevant issue.
-->
Issue Number: N/A


## What is the new behavior?
<!--
Example: English file translated into Spanish
-->
## What is the new behavior?


## Does this PR introduce a breaking change?
```
[ ] Yes
[ ] No
```

<!-- If this PR contains a breaking change, please describe the impact and migration path for existing applications below. -->


## Other information
147 .github/angular-robot.yml vendored
@ -1,11 +1,5 @@
|
||||
# Configuration for angular-robot
|
||||
|
||||
#options for the size plugin
|
||||
size:
|
||||
disabled: false
|
||||
maxSizeIncrease: 2000
|
||||
circleCiStatusName: "ci/circleci: test_ivy_aot"
|
||||
|
||||
# options for the merge plugin
|
||||
merge:
|
||||
# the status will be added to your pull requests
|
||||
@ -19,59 +13,6 @@ merge:
|
||||
# text to show when some checks are failing
|
||||
failureText: "The following checks are failing:"
|
||||
|
||||
# the g3 status will be added to your pull requests if they include files that match the patterns
|
||||
g3Status:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the name of the status
|
||||
context: "google3"
|
||||
# text to show when the status is pending, {{PRNumber}} will be replaced by the PR number
|
||||
pendingDesc: "Googler: run g3sync presubmit {{PRNumber}}"
|
||||
# text to show when the status is success
|
||||
successDesc: "Does not affect google3"
|
||||
# link to use for the details
|
||||
url: "http://go/angular/g3sync"
|
||||
# list of patterns to check for the files changed by the PR
|
||||
# this list must be manually kept in sync with google3/third_party/javascript/angular2/copy.bara.sky
|
||||
include:
|
||||
- "LICENSE"
|
||||
- "modules/benchmarks/**"
|
||||
- "modules/system.d.ts"
|
||||
- "packages/**"
|
||||
# list of patterns to ignore for the files changed by the PR
|
||||
exclude:
|
||||
- "packages/*"
|
||||
- "packages/bazel/*"
|
||||
- "packages/bazel/src/api-extractor/**"
|
||||
- "packages/bazel/src/builders/**"
|
||||
- "packages/bazel/src/ng_package/**"
|
||||
- "packages/bazel/src/protractor/**"
|
||||
- "packages/bazel/src/schematics/**"
|
||||
- "packages/compiler-cli/ngcc/**"
|
||||
- "packages/docs/**"
|
||||
- "packages/elements/schematics/**"
|
||||
- "packages/examples/**"
|
||||
- "packages/language-service/**"
|
||||
- "packages/localize/**"
|
||||
- "packages/private/**"
|
||||
- "packages/service-worker/**"
|
||||
- "**/.gitignore"
|
||||
- "**/.gitkeep"
|
||||
- "**/yarn.lock"
|
||||
- "**/package.json"
|
||||
- "**/third_party/**"
|
||||
- "**/tsconfig-build.json"
|
||||
- "**/tsconfig.json"
|
||||
- "**/rollup.config.js"
|
||||
- "**/BUILD.bazel"
|
||||
- "**/*.md"
|
||||
- "packages/**/integrationtest/**"
|
||||
- "packages/**/test/**"
|
||||
- "packages/zone.js/*"
|
||||
- "packages/zone.js/doc/**"
|
||||
- "packages/zone.js/example/**"
|
||||
- "packages/zone.js/scripts/**"
|
||||
|
||||
# comment that will be added to a PR when there is a conflict, leave empty or set to false to disable
|
||||
mergeConflictComment: "Hi @{{PRAuthor}}! This PR has merge conflicts due to recent upstream merges.
|
||||
\nPlease help to unblock it by resolving these conflicts. Thanks!"
|
||||
@ -79,48 +20,31 @@ merge:
|
||||
# label to monitor
|
||||
mergeLabel: "PR action: merge"
|
||||
|
||||
# adding any of these labels will also add the merge label
|
||||
mergeLinkedLabels:
|
||||
- "PR action: merge-assistance"
|
||||
|
||||
# list of checks that will determine if the merge label can be added
|
||||
checks:
|
||||
|
||||
# require that the PR has reviews from all requested reviewers
|
||||
#
|
||||
# This enables us to request reviews from both eng and tech writers, or multiple eng folks, and prevents accidental merges.
|
||||
# Rather than merging PRs with pending reviews, if all approvals are obtained and additional reviews are not needed, any pending reviewers should be removed via GitHub UI (this also leaves an audit trail behind these decisions).
|
||||
requireReviews: true,
|
||||
|
||||
# whether the PR shouldn't have a conflict with the base branch
|
||||
noConflict: true
|
||||
# list of labels that a PR needs to have, checked with a regexp (e.g. "PR target:" will work for the label "PR target: master")
|
||||
requiredLabels:
|
||||
- "PR target: *"
|
||||
- "PR target:"
|
||||
- "cla: yes"
|
||||
|
||||
# list of labels that a PR shouldn't have, checked after the required labels with a regexp
|
||||
forbiddenLabels:
|
||||
- "PR target: TBD"
|
||||
- "PR action: cleanup"
|
||||
- "PR action: review"
|
||||
- "PR state: blocked"
|
||||
- "cla: no"
|
||||
|
||||
# list of PR statuses that need to be successful
|
||||
requiredStatuses:
|
||||
- "continuous-integration/travis-ci/pr"
|
||||
- "code-review/pullapprove"
|
||||
- "ci/circleci: build"
|
||||
- "ci/circleci: lint"
|
||||
- "ci/circleci: publish_snapshot"
|
||||
- "ci/angular: size"
|
||||
- "cla/google"
|
||||
- "google3"
|
||||
- "pullapprove"
|
||||
|
||||
|
||||
# the comment that will be added when the merge label is added despite failing checks, leave empty or set to false to disable
|
||||
# {{MERGE_LABEL}} will be replaced by the value of the mergeLabel option
|
||||
# {{PLACEHOLDER}} will be replaced by the list of failing checks
|
||||
# the comment that will be added when the merge label is added despite failing checks, leave empty or set to false to disable
|
||||
# {{MERGE_LABEL}} will be replaced by the value of the mergeLabel option
|
||||
# {{PLACEHOLDER}} will be replaced by the list of failing checks
|
||||
mergeRemovedComment: "I see that you just added the `{{MERGE_LABEL}}` label, but the following checks are still failing:
|
||||
\n{{PLACEHOLDER}}
|
||||
\n
|
||||
@ -130,60 +54,15 @@ merge:
|
||||
|
||||
# options for the triage plugin
|
||||
triage:
|
||||
# number of the milestone to apply when the issue has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the issue is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if an issue has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
# arrays of labels that determine if an issue is triaged
|
||||
triagedLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if an issue has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: bug/fix"
|
||||
- "severity*"
|
||||
- "freq*"
|
||||
- "comp: *"
|
||||
- "type: bug"
|
||||
- "severity"
|
||||
- "freq"
|
||||
- "comp:"
|
||||
-
|
||||
- "type: feature"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: refactor"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: RFC / Discussion / question"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: confusing"
|
||||
- "comp: *"
|
||||
-
|
||||
- "type: use-case"
|
||||
- "comp: *"
|
||||
|
||||
# options for the triage PR plugin
|
||||
triagePR:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# number of the milestone to apply when the PR has not been triaged yet
|
||||
needsTriageMilestone: 83,
|
||||
# number of the milestone to apply when the PR is triaged
|
||||
defaultMilestone: 82,
|
||||
# arrays of labels that determine if a PR has been triaged by the caretaker
|
||||
l1TriageLabels:
|
||||
-
|
||||
- "comp: *"
|
||||
# arrays of labels that determine if a PR has been fully triaged
|
||||
l2TriageLabels:
|
||||
-
|
||||
- "type: *"
|
||||
- "effort*"
|
||||
- "risk*"
|
||||
- "comp: *"
|
||||
|
||||
# options for rerunning CI
|
||||
rerunCircleCI:
|
||||
# set to true to disable
|
||||
disabled: false
|
||||
# the label which when added triggers a rerun of the default CircleCI workflow
|
||||
triggerRerunLabel: "PR action: rerun CI at HEAD"
|
||||
- "comp:"
|
||||
|
15 .github/workflows/lock-closed.yml vendored
@ -1,15 +0,0 @@
name: Lock closed inactive issues

on:
  schedule:
    # Run at 16:00 every day
    - cron: '0 16 * * *'

jobs:
  lock_closed:
    if: github.repository == 'angular/angular'
    runs-on: ubuntu-latest
    steps:
      - uses: angular/dev-infra/github-actions/lock-closed@66462f6
        with:
          lock-bot-key: ${{ secrets.LOCK_BOT_PRIVATE_KEY }}
26 .gitignore vendored
@ -1,28 +1,21 @@
.DS_STORE

/dist/
/bazel-out
/integration/bazel/bazel-*
*.log
bazel-*
e2e_test.*
node_modules
bower_components
tools/gulp-tasks/cldr/cldr-data/

# Include when developing application packages.
pubspec.lock
.c9
.idea/
.devcontainer/*
!.devcontainer/README.md
!.devcontainer/recommended-devcontainer.json
!.devcontainer/recommended-Dockerfile
.settings/
.vscode/launch.json
.vscode/settings.json
.vscode/tasks.json
*.swo
modules/.settings
.vscode
modules/.vscode
.vimrc
.nvimrc

# Don't check in secret files
*secret.js
@ -36,12 +29,3 @@ yarn-error.log

# rollup-test output
/modules/rollup-test/dist/

# User specific bazel settings
.bazelrc.user

.notes.md
baseline.json

# Ignore .history for the xyz.local-history VSCode extension
.history
145 .gitmessage
@ -1,145 +0,0 @@
|
||||
<type>(<scope>): <summary>
|
||||
|
||||
<Describe the motivation behind this change - explain WHY you are making this change. Wrap all lines
|
||||
at 100 characters.>
|
||||
|
||||
Fixes #<issue number>
|
||||
|
||||
# ────────────────────────────────────────── 100 chars ────────────────────────────────────────────┤
|
||||
|
||||
|
||||
# Example Commit Messages
|
||||
# =======================
|
||||
|
||||
|
||||
# ─── Example: Simple refactor ────────────────────────────────────────────────────────────────────┤
|
||||
# refactor(core): rename refreshDynamicEmbeddedViews to refreshEmbeddedViews
|
||||
#
|
||||
# Improve code readability. The original name no longer matches how the function is used.
|
||||
# ─────────────────────────────────────────────────────────────────────────────────────────────────┤
|
||||
|
||||
|
||||
# ─── Example: Simple docs change ─────────────────────────────────────────────────────────────────┤
|
||||
# docs: clarify the service limitation in providers.md guide
|
||||
#
|
||||
# Fixes #36332
|
||||
# ─────────────────────────────────────────────────────────────────────────────────────────────────┤
|
||||
|
||||
|
||||
# ─── Example: A bug fix ──────────────────────────────────────────────────────────────────────────┤
|
||||
# fix(ngcc): ensure lockfile is removed when `analyzeFn` fails
|
||||
#
|
||||
# Previously an error thrown in the `analyzeFn` would cause the ngcc process to exit immediately
|
||||
# without removing the lockfile, and potentially before the unlocker process had been successfully
|
||||
# spawned resulting in the lockfile being orphaned and left behind.
|
||||
#
|
||||
# Now we catch these errors and remove the lockfile as needed.
|
||||
# ─────────────────────────────────────────────────────────────────────────────────────────────────┤
|
||||
|
||||
|
||||
# ─── Example: Breaking change ────────────────────────────────────────────────────────────────────┤
|
||||
# feat(bazel): simplify ng_package by dropping esm5 and fesm5
|
||||
#
|
||||
# esm5 and fesm5 distributions are no longer needed and have been deprecated in the past.
|
||||
#
|
||||
# https://v9.angular.io/guide/deprecations#esm5-and-fesm5-code-formats-in-angular-npm-packages
|
||||
#
|
||||
# This commit modifies ng_package to no longer distribute these two formats in npm packages built by
|
||||
# ng_package (e.g. @angular/core).
|
||||
#
|
||||
# This commit intentionally doesn't fully clean up the ng_package rule to remove all traces of esm5
|
||||
# and fems5 build artifacts as that is a bigger cleanup and currently we are narrowing down the
|
||||
# scope of this change to the MVP needed for v10, which in this case is 'do not put esm5 and fesm5'
|
||||
# into the npm packages.
|
||||
#
|
||||
# More cleanup to follow: https://angular-team.atlassian.net/browse/FW-2143
|
||||
#
|
||||
# BREAKING CHANGE: esm5 and fesm5 format is no longer distributed in Angular's npm packages e.g.
|
||||
# @angular/core
|
||||
#
|
||||
# Angular CLI will automatically downlevel the code to es5 if differential loading is enabled in the
|
||||
# Angular project, so no action is required from Angular CLI users.
|
||||
#
|
||||
# If you are not using Angular CLI to build your application or library, and you need to be able to
|
||||
# build es5 artifacts, then you will need to downlevel the distributed Angular code to es5 on your
|
||||
# own.
|
||||
#
|
||||
#
|
||||
# Fixes #1234
|
||||
# ─────────────────────────────────────────────────────────────────────────────────────────────────┤
|
||||
|
||||
|
||||
|
||||
# Angular Commit Message Format
|
||||
# =============================
|
||||
#
|
||||
# The full specification of the Angular Commit Message Format can be found at
|
||||
# https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit
|
||||
#
|
||||
# The following is an excerpt of the specification with the most commonly needed info.
|
||||
#
|
||||
# Each commit message consists of a *header*, a *body*, and a *footer*.
|
||||
#
|
||||
# <header>
|
||||
# <BLANK LINE>
|
||||
# <body>
|
||||
# <BLANK LINE>
|
||||
# <footer>
|
||||
#
|
||||
# The header is mandatory.
|
||||
#
|
||||
# The body is mandatory for all commits except for those of scope "docs". When the body is required
|
||||
# it must be at least 20 characters long.
|
||||
#
|
||||
# The footer is optional.
|
||||
#
|
||||
# Any line of the commit message cannot be longer than 100 characters.
|
||||
#
|
||||
#
|
||||
# Commit Message Header
|
||||
# ---------------------
|
||||
#
|
||||
# <type>(<scope>): <short summary>
|
||||
# │ │ │
|
||||
# │ │ └─⫸ Summary in present tense. Not capitalized. No period at the end.
|
||||
# │ │
|
||||
# │ └─⫸ Commit Scope: animations|bazel|benchpress|common|compiler|compiler-cli|core|
|
||||
# │ elements|forms|http|language-service|localize|platform-browser|
|
||||
# │ platform-browser-dynamic|platform-server|platform-webworker|
|
||||
# │ platform-webworker-dynamic|router|service-worker|upgrade|zone.js|
|
||||
# │ packaging|changelog|dev-infra|docs-infra|migrations|ngcc|ve
|
||||
# │ https://github.com/angular/angular/blob/master/CONTRIBUTING.md#scope
|
||||
# │
|
||||
# └─⫸ Commit Type: build|ci|docs|feat|fix|perf|refactor|style|test
|
||||
# https://github.com/angular/angular/blob/master/CONTRIBUTING.md#type
|
||||
#
|
||||
#
|
||||
# Commit Message Body
|
||||
# ---------------------
|
||||
#
|
||||
# Just as in the summary, use the imperative, present tense: "fix" not "fixed" nor "fixes".
|
||||
#
|
||||
# Explain the motivation for the change in the commit message body. This commit message should
|
||||
# explain WHY you are making the change. You can include a comparison of the previous behavior with
|
||||
# the new behavior in order to illustrate the impact of the change.
|
||||
#
|
||||
#
|
||||
# Commit Message Footer
|
||||
# ---------------------
|
||||
#
|
||||
# The footer can contain information about breaking changes and is also the place to reference
|
||||
# GitHub issues, Jira tickets, and other PRs that this commit closes or is related to.
|
||||
#
|
||||
# ```
|
||||
# BREAKING CHANGE: <breaking change summary>
|
||||
# <BLANK LINE>
|
||||
# <breaking change description + migration instructions>
|
||||
# <BLANK LINE>
|
||||
# <BLANK LINE>
|
||||
# Fixes #<issue number>
|
||||
# ```
|
||||
#
|
||||
# Breaking Change section should start with the phrase "BREAKING CHANGE: " followed by a summary of
|
||||
# the breaking change, a blank line, and a detailed description of the breaking change that also
|
||||
# includes migration instructions.
|
||||
#
|
@ -1,53 +0,0 @@
|
||||
import {CommitMessageConfig} from '../dev-infra/commit-message/config';
|
||||
|
||||
/**
|
||||
* The configuration for `ng-dev commit-message` commands.
|
||||
*/
|
||||
export const commitMessage: CommitMessageConfig = {
|
||||
maxLineLength: 120,
|
||||
minBodyLength: 20,
|
||||
minBodyLengthTypeExcludes: ['docs', 'upstream'],
|
||||
types: [
|
||||
'build',
|
||||
'ci',
|
||||
'docs',
|
||||
'feat',
|
||||
'fix',
|
||||
'perf',
|
||||
'refactor',
|
||||
'release',
|
||||
'style',
|
||||
'test',
|
||||
'upstream',
|
||||
],
|
||||
scopes: [
|
||||
'animations',
|
||||
'bazel',
|
||||
'benchpress',
|
||||
'changelog',
|
||||
'common',
|
||||
'compiler',
|
||||
'compiler-cli',
|
||||
'core',
|
||||
'dev-infra',
|
||||
'docs-infra',
|
||||
'elements',
|
||||
'forms',
|
||||
'http',
|
||||
'language-service',
|
||||
'localize',
|
||||
'migrations',
|
||||
'ngcc',
|
||||
'packaging',
|
||||
'platform-browser',
|
||||
'platform-browser-dynamic',
|
||||
'platform-server',
|
||||
'platform-webworker',
|
||||
'platform-webworker-dynamic',
|
||||
'router',
|
||||
'service-worker',
|
||||
'upgrade',
|
||||
've',
|
||||
'zone.js',
|
||||
]
|
||||
};
|
@ -1,11 +0,0 @@
import {commitMessage} from './commit-message';
import {format} from './format';
import {github} from './github';
import {merge} from './merge';

module.exports = {
  commitMessage,
  format,
  github,
  merge,
};
@ -1,22 +0,0 @@
import {FormatConfig} from '../dev-infra/format/config';

/**
 * Configuration for the `ng-dev format` command.
 */
export const format: FormatConfig = {
  'clang-format': {
    'matchers': [
      '**/*.{js,ts}',
      // TODO: burn down format failures and remove aio and integration exceptions.
      '!aio/**',
      '!integration/**',
      // Both third_party and .yarn are directories containing copied code which should
      // not be modified.
      '!third_party/**',
      '!.yarn/**',
      // Do not format d.ts files as they are generated
      '!**/*.d.ts',
    ]
  },
  'buildifier': true
};
@ -1,15 +0,0 @@
# The file is inert unless it's explicitly included into the local git config via:
#
# ```
# git config --add include.path '../.ng-dev/gitconfig'
# ```
#
# Calling that command will append the following into `.git/config` of the current git workspace
# (i.e. $GIT_DIR, typically `angular/.git/config`):
#
# ```
# [include]
#     path = ../.ng-dev/gitconfig
# ```
[commit]
  template = .gitmessage
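The include mechanism described in the comments above is opt-in. A minimal sketch of enabling it and confirming that git picked up the commit template (run from your angular clone; the `--get` check is just one convenient way to verify):

```shell
# Opt in: include the shared gitconfig from your local .git/config.
git config --add include.path '../.ng-dev/gitconfig'

# Verify that the commit template is now active.
git config --get commit.template   # should print: .gitmessage
```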
@ -1,11 +0,0 @@
import {GithubConfig} from '../dev-infra/utils/config';

/**
 * Github configuration for the `ng-dev` command. This repository is used as
 * remote for the merge script and other utilities like `ng-dev pr rebase`.
 */

export const github: GithubConfig = {
  owner: 'angular',
  name: 'angular'
};
@ -1,38 +0,0 @@
import {MergeConfig} from '../dev-infra/pr/merge/config';

/**
 * Configuration for the merge tool in `ng-dev`. This sets up the labels which
 * are respected by the merge script (e.g. the target labels).
 */
export const merge = (): MergeConfig => {
  // TODO: resume dynamically determining patch branch
  const patch = '10.0.x';
  return {
    githubApiMerge: false,
    claSignedLabel: 'cla: yes',
    mergeReadyLabel: /^PR action: merge(-assistance)?/,
    caretakerNoteLabel: 'PR action: merge-assistance',
    commitMessageFixupLabel: 'commit message fixup',
    labels: [
      {
        pattern: 'PR target: master-only',
        branches: ['master'],
      },
      {
        pattern: 'PR target: patch-only',
        branches: [patch],
      },
      {
        pattern: 'PR target: master & patch',
        branches: ['master', patch],
      },
    ],
    requiredBaseCommits: {
      // PRs that target either `master` or the patch branch, need to be rebased
      // on top of the latest commit message validation fix.
      // These SHAs are the commits that update the required license text in the header.
      'master': '5aeb9a4124922d8ac08eb73b8f322905a32b0b3a',
      [patch]: '27b95ba64a5d99757f4042073fd1860e20e3ed24'
    },
  };
};
1616 .pullapprove.yml (file diff suppressed because it is too large)
82 .travis.yml (normal file)
@ -0,0 +1,82 @@
|
||||
language: node_js
|
||||
sudo: false
|
||||
dist: trusty
|
||||
node_js:
|
||||
- '8.9.1'
|
||||
|
||||
addons:
|
||||
# firefox: "38.0"
|
||||
apt:
|
||||
sources:
|
||||
# needed to install g++ that is used by npms's native modules
|
||||
- ubuntu-toolchain-r-test
|
||||
packages:
|
||||
# needed to install g++ that is used by npms's native modules
|
||||
- g++-4.8
|
||||
# https://docs.travis-ci.com/user/jwt
|
||||
jwt:
|
||||
# SAUCE_ACCESS_KEY<=secret for NGBUILDS_IO_KEY to work around travis-ci/travis-ci#7223, unencrypted value in valentine as NGBUILDS_IO_KEY>
|
||||
# we alias NGBUILDS_IO_KEY to $SAUCE_ACCESS_KEY in env.sh and set the SAUCE_ACCESS_KEY there
|
||||
- secure: "L7nrZwkAtFtYrP2DykPXgZvEKjkv0J/TwQ/r2QGxFTaBq4VZn+2Dw0YS7uCxoMqYzDwH0aAOqxoutibVpk8Z/16nE3tNmU5RzltMd6Xmt3qU2f/JDQLMo6PSlBodnjOUsDHJgmtrcbjhqrx/znA237BkNUu6UZRT7mxhXIZpn0U="
|
||||
branches:
|
||||
except:
|
||||
- g3
|
||||
|
||||
cache:
|
||||
yarn: true
|
||||
directories:
|
||||
- ./node_modules
|
||||
- ./.chrome/chromium
|
||||
- ./aio/node_modules
|
||||
|
||||
env:
|
||||
global:
|
||||
# GITHUB_TOKEN_ANGULAR=<github token, a personal access token of the angular-builds account, account access in valentine>
|
||||
# This is needed for the e2e Travis matrix task to publish packages to github for continuous packages delivery.
|
||||
- secure: "aCdHveZuY8AT4Jr1JoJB4LxZsnGWRe/KseZh1YXYe5UtufFCtTVHvUcLn0j2aLBF0KpdyS+hWf0i4np9jthKu2xPKriefoPgCMpisYeC0MFkwbmv+XlgkUbgkgVZMGiVyX7DCYXVahxIoOUjVMEDCbNiHTIrfEuyq24U3ok2tHc="
|
||||
# FIREBASE_TOKEN
|
||||
# This is needed for publishing builds to the "aio-staging" and "angular-io" firebase projects.
|
||||
# This token was generated using the aio-deploy@angular.io account using `firebase login:ci` and password from valentine
|
||||
- secure: "L5CyQmpwWtoR4Qi4xlWQh/cL1M6ZeJL4W4QAr4HdKFMgYt9h+Whqkymyh2NxwmCbPvWa7yUd+OiLQUDCY7L2VIg16hTwoe2CgYDyQA0BEwLzxtRrJXl93TfwMlrUx5JSIzAccD6D4sjtz8kSFMomK2Nls33xOXOukwyhVMjd0Cg="
|
||||
# ANGULAR_PAYLOAD_FIREBASE_TOKEN
|
||||
# This is for payload size data to "angular-payload-size" firebase project
|
||||
# This token was generated using the payload@angular.io account using `firebase login:ci` and password from valentine
|
||||
- secure: "SxotP/ymNy6uWAVbfwM9BlwETPEBpkRvU/F7fCtQDDic99WfQHzzUSQqHTk8eKk3GrGAOSL09vT0WfStQYEIGEoS5UHWNgOnelxhw+d5EnaoB8vQ0dKQBTK092hQg4feFprr+B/tCasyMV6mVwpUzZMbIJNn/Rx7H5g1bp+Gkfg="
|
||||
matrix:
|
||||
# Order: a slower build first, so that we don't occupy an idle travis worker waiting for others to complete.
|
||||
- CI_MODE=e2e
|
||||
- CI_MODE=e2e_2
|
||||
- CI_MODE=js
|
||||
- CI_MODE=saucelabs_required
|
||||
# deactivated, see #19768
|
||||
# - CI_MODE=browserstack_required
|
||||
- CI_MODE=saucelabs_optional
|
||||
- CI_MODE=browserstack_optional
|
||||
- CI_MODE=aio_tools_test
|
||||
- CI_MODE=aio
|
||||
- CI_MODE=aio_e2e AIO_SHARD=0
|
||||
- CI_MODE=aio_e2e AIO_SHARD=1
|
||||
- CI_MODE=bazel
|
||||
|
||||
matrix:
|
||||
fast_finish: true
|
||||
allow_failures:
|
||||
- env: "CI_MODE=saucelabs_optional"
|
||||
- env: "CI_MODE=browserstack_optional"
|
||||
|
||||
before_install:
|
||||
# source the env.sh script so that the exported variables are available to other scripts later on
|
||||
- source ./scripts/ci/env.sh print
|
||||
|
||||
install:
|
||||
- ./scripts/ci/install.sh
|
||||
|
||||
script:
|
||||
- ./scripts/ci/build.sh
|
||||
- ./scripts/ci/test.sh
|
||||
# deploy is part of 'script' and not 'after_success' so that we fail the build if the deployment fails
|
||||
- ./scripts/ci/deploy.sh
|
||||
- ./scripts/ci/angular.sh
|
||||
# all the scripts under this line will not quickly abort in case ${TRAVIS_TEST_RESULT} is 1 (job failure)
|
||||
- ./scripts/ci/cleanup.sh
|
||||
- ./scripts/ci/print-logs.sh
|
25 .vscode/README.md vendored
@ -1,25 +0,0 @@
# VSCode Configuration

This folder contains opt-in [Workspace Settings](https://code.visualstudio.com/docs/getstarted/settings), [Tasks](https://code.visualstudio.com/docs/editor/tasks), [Launch Configurations](https://code.visualstudio.com/Docs/editor/debugging#_launch-configurations) and [Extension Recommendations](https://code.visualstudio.com/docs/editor/extension-gallery#_workspace-recommended-extensions) that the Angular team recommends using when working on this repository.

## Usage

To use the recommended configurations, follow the steps below (see the sketch after this list):

- install the recommended extensions in `.vscode/extensions.json`
- copy (or link) `.vscode/recommended-settings.json` to `.vscode/settings.json`
- copy (or link) `.vscode/recommended-launch.json` to `.vscode/launch.json`
- copy (or link) `.vscode/recommended-tasks.json` to `.vscode/tasks.json`
- restart the editor

If you already have your own custom workspace settings, you should instead manually merge the file contents.

This isn't an automatic process, so you will need to repeat it when the settings are updated.

To see the recommended extensions, select "Extensions: Show Recommended Extensions" in the [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette).

## Editing `.vscode/recommended-*.json` files

If you wish to add extra configuration items, please keep in mind that any modifications you make here will be used by many users.

Try to keep these settings/configurations to things that help facilitate the development process and avoid altering the user workflow whenever possible.
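The copy/link steps above are plain file operations. A minimal sketch, assuming you prefer symlinks so that later updates to the recommended files are picked up automatically (a plain `cp` works just as well):

```shell
# From the repository root: link the recommended configs into place.
ln -s recommended-settings.json .vscode/settings.json
ln -s recommended-launch.json   .vscode/launch.json
ln -s recommended-tasks.json    .vscode/tasks.json

# Then restart VSCode so it picks up the new workspace configuration.
```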
15
.vscode/extensions.json
vendored
15
.vscode/extensions.json
vendored
@ -1,15 +0,0 @@
|
||||
{
|
||||
// See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
|
||||
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
|
||||
|
||||
// List of extensions which should be recommended for users of this workspace.
|
||||
"recommendations": [
|
||||
"devondcarew.bazel-code",
|
||||
"gkalpak.aio-docs-utils",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
||||
"xaver.clang-format",
|
||||
// The following extensions are useful when working on angular.io (i.e. inside the `aio/` directory).
|
||||
//"angular.ng-template",
|
||||
//"dbaeumer.vscode-eslint",
|
||||
],
|
||||
}
|
85
.vscode/recommended-launch.json
vendored
85
.vscode/recommended-launch.json
vendored
@ -1,85 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": true,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "Attach to bazel test ... --config=debug (no source maps)",
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": false,
|
||||
"sourceMaps": false,
|
||||
"localRoot": "${workspaceRoot}",
|
||||
"remoteRoot": "${workspaceRoot}",
|
||||
"stopOnEntry": false,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/acceptance",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test/render3",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
{
|
||||
"name": "IVY:packages/core/test",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"--config=debug"
|
||||
],
|
||||
"port": 9229,
|
||||
"address": "localhost",
|
||||
"restart": true,
|
||||
"sourceMaps": true,
|
||||
"timeout": 600000,
|
||||
},
|
||||
]
|
||||
}
|
32
.vscode/recommended-settings.json
vendored
32
.vscode/recommended-settings.json
vendored
@ -1,32 +0,0 @@
|
||||
{
|
||||
// Format js and ts files on save with `clang-format.executable`
|
||||
// If `clang-format.executable` is not being used, these two settings should be removed otherwise it will break existing formatting.
|
||||
// You can instead run `yarn gulp format` to manually format your code.
|
||||
"[javascript]": {
|
||||
"editor.formatOnSave": true,
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.formatOnSave": true,
|
||||
},
|
||||
// Please install https://marketplace.visualstudio.com/items?itemName=xaver.clang-format to take advantage of `clang-format` in VSCode.
|
||||
// (See https://clang.llvm.org/docs/ClangFormat.html for more info `clang-format`.)
|
||||
"clang-format.executable": "${workspaceRoot}/node_modules/.bin/clang-format",
|
||||
// Exclude third party modules and build artifacts from the editor watchers/searches.
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/**": true,
|
||||
"**/bazel-out/**": true,
|
||||
"**/dist/**": true,
|
||||
"**/aio/src/generated/**": true,
|
||||
},
|
||||
"search.exclude": {
|
||||
"**/node_modules": true,
|
||||
"**/bower_components": true,
|
||||
"**/bazel-out": true,
|
||||
"**/dist": true,
|
||||
"**/aio/src/generated": true,
|
||||
".history": true,
|
||||
},
|
||||
"git.ignoreLimitWarning": true,
|
||||
}
|
113
.vscode/recommended-tasks.json
vendored
113
.vscode/recommended-tasks.json
vendored
@ -1,113 +0,0 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "IVY:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/...",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
"packages/core/test/acceptance",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test/acceptance",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test/acceptance",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "VE:packages/core/test",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"packages/core/test",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
{
|
||||
"label": "IVY:packages/core/test/render3",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/node_modules/.bin/bazelisk",
|
||||
"args": [
|
||||
"test",
|
||||
"--config=ivy",
|
||||
"packages/core/test/render3",
|
||||
],
|
||||
"group": "test",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
@ -1,13 +0,0 @@
# Yarn Vendoring
We use Yarn's `yarn-path` configuration in a shared `.yarnrc` file to ensure
that everyone uses the same version of Yarn. Yarn checks the `.yarnrc` file to
determine whether it should delegate the command to a vendored version at the
provided path.

## How to update
To update to the latest version of Yarn as our vendored version (see the sketch below):
- Run this command
  ```sh
  yarn policies set-version latest
  ```
- Remove the previous version
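Updating the vendored Yarn boils down to the command above plus deleting the superseded release file. A minimal sketch (the old release file name is illustrative; remove whichever older version `.yarn/releases/` actually contains):

```shell
# Pin the latest Yarn release into .yarn/releases/ and update yarn-path in .yarnrc.
yarn policies set-version latest

# Remove the previously vendored release (example file name; adjust to the old version).
git rm .yarn/releases/yarn-1.22.4.js

# Commit the new release file together with the updated .yarnrc.
git add .yarn/releases .yarnrc
git commit -m "build: update vendored yarn version"
```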
147391 .yarn/releases/yarn-1.22.4.js vendored (file diff suppressed because one or more lines are too long)
5 .yarnrc
@ -1,5 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


yarn-path ".yarn/releases/yarn-1.22.4.js"
76
BUILD.bazel
76
BUILD.bazel
@ -1,49 +1,63 @@
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
exports_files([
|
||||
"LICENSE",
|
||||
"karma-js.conf.js",
|
||||
"browser-providers.conf.js",
|
||||
"scripts/ci/track-payload-size.sh",
|
||||
"scripts/ci/payload-size.sh",
|
||||
"scripts/ci/payload-size.js",
|
||||
"package.json",
|
||||
"tsconfig.json",
|
||||
])
|
||||
|
||||
alias(
|
||||
name = "tsconfig.json",
|
||||
actual = "//packages:tsconfig-build.json",
|
||||
# This rule belongs in node_modules/BUILD
|
||||
# It's here as a workaround for
|
||||
# https://github.com/bazelbuild/bazel/issues/374#issuecomment-296217940
|
||||
filegroup(
|
||||
name = "node_modules",
|
||||
# Performance workaround: list individual files
|
||||
# Reduces the number of files as inputs to nodejs_binary:
|
||||
# bazel query "deps(:node_modules)" | wc -l
|
||||
# This won't scale in the general case.
|
||||
# TODO(alexeagle): figure out what to do
|
||||
srcs = glob(["/".join([
|
||||
"node_modules",
|
||||
pkg,
|
||||
"**",
|
||||
ext,
|
||||
]) for pkg in [
|
||||
"jasmine",
|
||||
"typescript",
|
||||
"zone.js",
|
||||
"tsutils",
|
||||
"@types",
|
||||
"tsickle",
|
||||
"hammerjs",
|
||||
"protobufjs",
|
||||
"bytebuffer",
|
||||
"reflect-metadata",
|
||||
"source-map-support",
|
||||
"minimist",
|
||||
] for ext in [
|
||||
"*.js",
|
||||
"*.json",
|
||||
"*.d.ts",
|
||||
]]),
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "web_test_bootstrap_scripts",
|
||||
# do not sort
|
||||
srcs = [
|
||||
"@npm//:node_modules/core-js/client/core.js",
|
||||
"//packages/zone.js/bundles:zone.umd.js",
|
||||
"//packages/zone.js/bundles:zone-testing.umd.js",
|
||||
"//packages/zone.js/bundles:task-tracking.umd.js",
|
||||
"//:test-events.js",
|
||||
"//:third_party/shims_for_IE.js",
|
||||
# Including systemjs because it defines `__eval`, which produces correct stack traces.
|
||||
"@npm//:node_modules/systemjs/dist/system.src.js",
|
||||
"@npm//:node_modules/reflect-metadata/Reflect.js",
|
||||
"//:node_modules/reflect-metadata/Reflect.js",
|
||||
"//:node_modules/zone.js/dist/zone.js",
|
||||
"//:node_modules/zone.js/dist/async-test.js",
|
||||
"//:node_modules/zone.js/dist/sync-test.js",
|
||||
"//:node_modules/zone.js/dist/fake-async-test.js",
|
||||
"//:node_modules/zone.js/dist/proxy.js",
|
||||
"//:node_modules/zone.js/dist/jasmine-patch.js",
|
||||
],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "angularjs_scripts",
|
||||
name = "angularjs",
|
||||
# do not sort
|
||||
srcs = [
|
||||
# We also declare the unminfied AngularJS files since these can be used for
|
||||
# local debugging (e.g. see: packages/upgrade/test/common/test_helpers.ts)
|
||||
"@npm//:node_modules/angular/angular.js",
|
||||
"@npm//:node_modules/angular/angular.min.js",
|
||||
"@npm//:node_modules/angular-1.5/angular.js",
|
||||
"@npm//:node_modules/angular-1.5/angular.min.js",
|
||||
"@npm//:node_modules/angular-1.6/angular.js",
|
||||
"@npm//:node_modules/angular-1.6/angular.min.js",
|
||||
"@npm//:node_modules/angular-mocks/angular-mocks.js",
|
||||
"@npm//:node_modules/angular-mocks-1.5/angular-mocks.js",
|
||||
"@npm//:node_modules/angular-mocks-1.6/angular-mocks.js",
|
||||
"//:node_modules/angular/angular.js",
|
||||
"//:node_modules/angular-mocks/angular-mocks.js",
|
||||
],
|
||||
)
|
||||
|
3668 CHANGELOG.md (file diff suppressed because it is too large)
@ -1,72 +0,0 @@
# Code of Conduct

## 1. Purpose

"We promise to treat every person involved in this community with courtesy and respect, regardless of the gender they identify with, their sexual orientation, physical limitation, age, race, ethnicity, religion, or level of knowledge. We expect anyone who wishes to contribute to this project to show the same behavior."

Under that principle we want to shape this community: a community of mutual respect, where anyone who is passionate about Angular and wants to get involved in any kind of activity must help maintain an atmosphere of courtesy, respecting the thoughts, actions, ideals, and proposals of others.

## 2. Expected behavior

- Avoid using insulting, humiliating, or intimidating expressions or gestures to refer to others.
- Exercise consideration and respect in your communications and actions.
- Refrain from degrading, discriminatory, abusive, or harassing conduct and language.
- Avoid behavior that attacks expressions of identity, gender, or religious beliefs, or actions that hurt or isolate others.
- Avoid comments about political views.
- Physical conduct and attitudes aimed at the personal, physical, or emotional discredit of any person are not tolerated.
- Exclusionary, sexist, and racist jokes and comments are not tolerated.
- Harassing attitudes are rejected.
- Harassment is rejected: this is understood as any kind of verbal comment that reinforces discrimination based on gender, gender identity and expression, sexual orientation, disability, physical appearance, body size, race, age, or religion in work or social contexts.
- Unwanted physical contact and/or unwanted sexual attention are not tolerated.
- Promote equal opportunities for training, education, exchange, and feedback before, during, and after events.
- Try to collaborate instead of creating conflict.
- Be aware of your surroundings and your fellow participants. Alert community leaders if you notice a dangerous situation, someone in distress, or violations of this Code of Conduct, even if they seem inconsequential.

## 3. Unacceptable behavior
Unacceptable behaviors include: intimidation, harassment, abuse, discrimination, derogatory or degrading communication or actions by any participant in our community, whether virtual or in one-on-one communications carried out in the context of the community. Please be respectful to everyone.

Harassment includes: harmful or prejudicial comments, verbal or written, related to gender, sexual orientation, race, religion, or disability; inappropriate use of nudity and/or sexual images in public spaces (including presentation slides); deliberate intimidation, stalking, or following; harassing photography or recordings; sustained disruption of talks and other events; inappropriate physical contact; and unwanted sexual attention.

## 4. Consequences of unacceptable behavior
Anyone asked to stop unacceptable behavior is expected to do so immediately; this applies to any member of the community.

In the event of repeated violations of the code of conduct, the organizers will have zero tolerance for this behavior.

If a community member engages in unacceptable behavior, the organizers may take any action they deem appropriate, up to and including a temporary ban or permanent expulsion from the community, without prior notice.

We prioritize the safety of marginalized people over the comfort of privileged people. We reserve the right not to act on complaints regarding:

- "Reverse racism", "reverse sexism", and "cisphobia".
- Reasonable communication of boundaries, such as "leave me alone", "go away", or "I'm not discussing this with you".
- Communicating in a "tone" you find disagreeable.
- Pointing out racist, sexist, cissexist, or otherwise oppressive behaviors or assumptions.

## 5. What to do if the code of conduct is breached?
If you are a victim of or witness to unacceptable behavior, or have any other concerns, please contact the organizing team as soon as possible:

- Direct message: Michael Prentice (@splaktar)
- Direct message: Jorge Cano (@jorgeucano)
- Direct message: Andrés Villanueva (@villanuevand)
- Email: [soporte@angular.lat](mailto:soporte@angular.lat)

### What happens next?
- Once a breach of the rules has been reported, the leadership team will meet and analyze the case.
- Reported incidents will be handled with discretion.
- The organizers will contact the person who broke the rules and take the appropriate measures.
- The affected person will be accompanied and supported.
- If the incident becomes public, Angular Hispano is not responsible for any harm this may cause to the offender or the person affected.

## 6. Scope
We expect all community participants (organizers; volunteers; paid or otherwise contributors; sponsors; and other guests) to abide by this Code of Conduct in all virtual and in-person spaces, as well as in all relevant one-on-one communications within the community.

### Contributors

- Alejandra Giraldo
- Vanessa Marely
- Mariano Alvarez
- Andrés Villanueva
- Michael Prentice
- javascript&friends
- Angular
- She Codes Angular
- Colombia dev
@ -1,359 +0,0 @@
|
||||
# Contributing to Angular
|
||||
|
||||
We would love for you to contribute to Angular and help make it even better than it is today!
|
||||
As a contributor, here are the guidelines we would like you to follow:
|
||||
|
||||
- [Code of Conduct](#coc)
|
||||
- [Question or Problem?](#question)
|
||||
- [Issues and Bugs](#issue)
|
||||
- [Feature Requests](#feature)
|
||||
- [Submission Guidelines](#submit)
|
||||
- [Coding Rules](#rules)
|
||||
- [Commit Message Guidelines](#commit)
|
||||
- [Signing the CLA](#cla)
|
||||
|
||||
|
||||
## <a name="coc"></a> Code of Conduct
|
||||
|
||||
Help us keep Angular open and inclusive.
|
||||
Please read and follow our [Code of Conduct][coc].
|
||||
|
||||
|
||||
## <a name="question"></a> Got a Question or Problem?
|
||||
|
||||
Do not open issues for general support questions as we want to keep GitHub issues for bug reports and feature requests.
|
||||
Instead, we recommend using [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) to ask support-related questions. When creating a new question on Stack Overflow, make sure to add the `angular` tag.
|
||||
|
||||
Stack Overflow is a much better place to ask questions since:
|
||||
|
||||
- there are thousands of people willing to help on Stack Overflow
|
||||
- questions and answers stay available for public viewing so your question/answer might help someone else
|
||||
- Stack Overflow's voting system assures that the best answers are prominently visible.
|
||||
|
||||
To save your and our time, we will systematically close all issues that are requests for general support and redirect people to Stack Overflow.
|
||||
|
||||
If you would like to chat about the question in real-time, you can reach out via [our gitter channel][gitter].
|
||||
|
||||
|
||||
## <a name="issue"></a> Found a Bug?
|
||||
|
||||
If you find a bug in the source code, you can help us by [submitting an issue](#submit-issue) to our [GitHub Repository][github].
|
||||
Even better, you can [submit a Pull Request](#submit-pr) with a fix.
|
||||
|
||||
|
||||
## <a name="feature"></a> Missing a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our GitHub Repository.
|
||||
If you would like to *implement* a new feature, please consider the size of the change in order to determine the right steps to proceed:
|
||||
|
||||
* For a **Major Feature**, first open an issue and outline your proposal so that it can be discussed.
|
||||
This process allows us to better coordinate our efforts, prevent duplication of work, and help you to craft the change so that it is successfully accepted into the project.
|
||||
|
||||
**Note**: Adding a new topic to the documentation, or significantly re-writing a topic, counts as a major feature.
|
||||
|
||||
* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
|
||||
|
||||
|
||||
## <a name="submit"></a> Submission Guidelines
|
||||
|
||||
|
||||
### <a name="submit-issue"></a> Submitting an Issue
|
||||
|
||||
Before you submit an issue, please search the issue tracker, maybe an issue for your problem already exists and the discussion might inform you of workarounds readily available.
|
||||
|
||||
We want to fix all the issues as soon as possible, but before fixing a bug we need to reproduce and confirm it.
|
||||
In order to reproduce bugs, we require that you provide a minimal reproduction.
|
||||
Having a minimal reproducible scenario gives us a wealth of important information without going back and forth to you with additional questions.
|
||||
|
||||
A minimal reproduction allows us to quickly confirm a bug (or point out a coding problem) as well as confirm that we are fixing the right problem.
|
||||
|
||||
We require a minimal reproduction to save maintainers' time and ultimately be able to fix more bugs.
|
||||
Often, developers find coding problems themselves while preparing a minimal reproduction.
|
||||
We understand that sometimes it might be hard to extract essential bits of code from a larger codebase but we really need to isolate the problem before we can fix it.
|
||||
|
||||
Unfortunately, we are not able to investigate / fix bugs without a minimal reproduction, so if we don't hear back from you, we are going to close an issue that doesn't have enough info to be reproduced.
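One practical way to build such a reproduction (an illustrative workflow, not a requirement of these guidelines) is to scaffold a bare Angular CLI project and add only the code needed to trigger the bug:

```shell
# create a stripped-down project to host the reproduction
# (the project name "bug-repro" is just a placeholder)
ng new bug-repro --minimal

cd bug-repro
# add only the component/service code that triggers the bug, then push the
# project to a public repository (or recreate it on StackBlitz) and link it in the issue
```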
|
||||
|
||||
You can file new issues by selecting from our [new issue templates](https://github.com/angular/angular/issues/new/choose) and filling out the issue template.
|
||||
|
||||
|
||||
### <a name="submit-pr"></a> Submitting a Pull Request (PR)
|
||||
|
||||
Before you submit your Pull Request (PR) consider the following guidelines:
|
||||
|
||||
1. Search [GitHub](https://github.com/angular/angular/pulls) for an open or closed PR that relates to your submission.
|
||||
You don't want to duplicate existing efforts.
|
||||
|
||||
2. Be sure that an issue describes the problem you're fixing, or documents the design for the feature you'd like to add.
|
||||
Discussing the design upfront helps to ensure that we're ready to accept your work.
|
||||
|
||||
3. Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without a signed CLA.
|
||||
Make sure you author all contributed Git commits with the email address associated with your CLA signature.
|
||||
|
||||
4. Fork the angular/angular repo.
|
||||
|
||||
5. Make your changes in a new git branch:
|
||||
|
||||
```shell
|
||||
git checkout -b my-fix-branch master
|
||||
```
|
||||
|
||||
6. Create your patch, **including appropriate test cases**.
|
||||
|
||||
7. Follow our [Coding Rules](#rules).
|
||||
|
||||
8. Run the full Angular test suite, as described in the [developer documentation][dev-doc], and ensure that all tests pass.
|
||||
|
||||
9. Commit your changes using a descriptive commit message that follows our [commit message conventions](#commit).
|
||||
Adherence to these conventions is necessary because release notes are automatically generated from these messages.
|
||||
|
||||
```shell
|
||||
git commit -a
|
||||
```
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
|
||||
10. Push your branch to GitHub:
|
||||
|
||||
```shell
|
||||
git push origin my-fix-branch
|
||||
```
|
||||
|
||||
11. In GitHub, send a pull request to `angular:master`.
|
||||
|
||||
If we ask for changes via code reviews then:
|
||||
|
||||
* Make the required updates.
|
||||
* Re-run the Angular test suites to ensure tests are still passing.
|
||||
* Rebase your branch and force push to your GitHub repository (this will update your Pull Request):
|
||||
|
||||
```shell
|
||||
git rebase master -i
|
||||
git push -f
|
||||
```
|
||||
|
||||
That's it! Thank you for your contribution!
|
||||
|
||||
|
||||
#### After your pull request is merged
|
||||
|
||||
After your pull request is merged, you can safely delete your branch and pull the changes from the main (upstream) repository:
|
||||
|
||||
* Delete the remote branch on GitHub either through the GitHub web UI or your local shell as follows:
|
||||
|
||||
```shell
|
||||
git push origin --delete my-fix-branch
|
||||
```
|
||||
|
||||
* Check out the master branch:
|
||||
|
||||
```shell
|
||||
git checkout master -f
|
||||
```
|
||||
|
||||
* Delete the local branch:
|
||||
|
||||
```shell
|
||||
git branch -D my-fix-branch
|
||||
```
|
||||
|
||||
* Update your master with the latest upstream version (see the note on configuring the `upstream` remote below):
|
||||
|
||||
```shell
|
||||
git pull --ff upstream master
|
||||
```
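
The `git pull --ff upstream master` step above assumes a Git remote named `upstream` that points at the main angular/angular repository. If you have not configured one yet, a minimal setup sketch is:

```shell
# register the main repository as the "upstream" remote (one-time setup)
git remote add upstream https://github.com/angular/angular.git

# fetch its branches so that `git pull --ff upstream master` can fast-forward your master
git fetch upstream
```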
|
||||
|
||||
|
||||
## <a name="rules"></a> Coding Rules
|
||||
To ensure consistency throughout the source code, keep these rules in mind as you are working:
|
||||
|
||||
* All features or bug fixes **must be tested** by one or more specs (unit-tests).
|
||||
* All public API methods **must be documented**.
|
||||
* We follow [Google's JavaScript Style Guide][js-style-guide], but wrap all code at **100 characters**.
|
||||
|
||||
An automated formatter is available, see [DEVELOPER.md](docs/DEVELOPER.md#clang-format).
|
||||
|
||||
|
||||
## <a name="commit"></a> Commit Message Format
|
||||
|
||||
*This specification is inspired and supersedes the [AngularJS commit message format][commit-message-format].*
|
||||
|
||||
We have very precise rules over how our Git commit messages must be formatted.
|
||||
This format leads to **easier to read commit history**.
|
||||
|
||||
Each commit message consists of a **header**, a **body**, and a **footer**.
|
||||
|
||||
|
||||
```
|
||||
<header>
|
||||
<BLANK LINE>
|
||||
<body>
|
||||
<BLANK LINE>
|
||||
<footer>
|
||||
```
|
||||
|
||||
The `header` is mandatory and must conform to the [Commit Message Header](#commit-header) format.
|
||||
|
||||
The `body` is mandatory for all commits except for those of scope "docs".
|
||||
When the body is required it must be at least 20 characters long.
|
||||
|
||||
The `footer` is optional.
|
||||
|
||||
Any line of the commit message cannot be longer than 100 characters.
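
If you want a quick local check before pushing (a convenience sketch, not an official tool of this repository), you can list any over-long lines of your most recent commit message like this:

```shell
# print every line of the last commit message that exceeds 100 characters
git log -1 --pretty=%B | awk 'length > 100'
```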
|
||||
|
||||
|
||||
#### <a href="commit-header"></a>Commit Message Header
|
||||
|
||||
```
|
||||
<type>(<scope>): <short summary>
|
||||
│ │ │
|
||||
│ │ └─⫸ Summary in present tense. Not capitalized. No period at the end.
|
||||
│ │
|
||||
│ └─⫸ Commit Scope: animations|bazel|benchpress|common|compiler|compiler-cli|core|
|
||||
│ elements|forms|http|language-service|localize|platform-browser|
|
||||
│ platform-browser-dynamic|platform-server|platform-webworker|
|
||||
│ platform-webworker-dynamic|router|service-worker|upgrade|zone.js|
|
||||
│ packaging|changelog|dev-infra|docs-infra|migrations|ngcc|ve
|
||||
│
|
||||
└─⫸ Commit Type: build|ci|docs|feat|fix|perf|refactor|style|test
|
||||
```
|
||||
|
||||
The `<type>` and `<summary>` fields are mandatory, the `(<scope>)` field is optional.
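
For illustration, a header that follows this format could look like the sample below (reused from the commit samples in an earlier revision of this guide):

```
docs(changelog): update change log to beta.5
```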
|
||||
|
||||
|
||||
##### Type
|
||||
|
||||
Must be one of the following:
|
||||
|
||||
* **build**: Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)
|
||||
* **ci**: Changes to our CI configuration files and scripts (example scopes: Circle, BrowserStack, SauceLabs)
|
||||
* **docs**: Documentation only changes
|
||||
* **feat**: A new feature
|
||||
* **fix**: A bug fix
|
||||
* **perf**: A code change that improves performance
|
||||
* **refactor**: A code change that neither fixes a bug nor adds a feature
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
* **test**: Adding missing tests or correcting existing tests
|
||||
|
||||
|
||||
##### Scope
|
||||
The scope should be the name of the npm package affected (as perceived by the person reading the changelog generated from commit messages).
|
||||
|
||||
The following is the list of supported scopes:
|
||||
|
||||
* `animations`
|
||||
* `bazel`
|
||||
* `benchpress`
|
||||
* `common`
|
||||
* `compiler`
|
||||
* `compiler-cli`
|
||||
* `core`
|
||||
* `elements`
|
||||
* `forms`
|
||||
* `http`
|
||||
* `language-service`
|
||||
* `localize`
|
||||
* `platform-browser`
|
||||
* `platform-browser-dynamic`
|
||||
* `platform-server`
|
||||
* `platform-webworker`
|
||||
* `platform-webworker-dynamic`
|
||||
* `router`
|
||||
* `service-worker`
|
||||
* `upgrade`
|
||||
* `zone.js`
|
||||
|
||||
There are currently a few exceptions to the "use package name" rule:
|
||||
|
||||
* `packaging`: used for changes that change the npm package layout in all of our packages, e.g. public path changes, package.json changes done to all packages, d.ts file/format changes, changes to bundles, etc.
|
||||
|
||||
* `changelog`: used for updating the release notes in CHANGELOG.md
|
||||
|
||||
* `dev-infra`: used for dev-infra related changes within the directories /scripts, /tools and /dev-infra
|
||||
|
||||
* `docs-infra`: used for docs-app (angular.io) related changes within the /aio directory of the repo
|
||||
|
||||
* `migrations`: used for changes to the `ng update` migrations.
|
||||
|
||||
* `ngcc`: used for changes to the [Angular Compatibility Compiler](./packages/compiler-cli/ngcc/README.md)
|
||||
|
||||
* `ve`: used for changes specific to ViewEngine (legacy compiler/renderer).
|
||||
|
||||
* none/empty string: useful for `style`, `test` and `refactor` changes that are done across all packages (e.g. `style: add missing semicolons`) and for docs changes that are not related to a specific package (e.g. `docs: fix typo in tutorial`).
|
||||
|
||||
|
||||
##### Summary
|
||||
|
||||
Use the summary field to provide a succinct description of the change:
|
||||
|
||||
* use the imperative, present tense: "change" not "changed" nor "changes"
|
||||
* don't capitalize the first letter
|
||||
* no dot (.) at the end
|
||||
|
||||
|
||||
#### Commit Message Body
|
||||
|
||||
Just as in the summary, use the imperative, present tense: "fix" not "fixed" nor "fixes".
|
||||
|
||||
Explain the motivation for the change in the commit message body. This commit message should explain _why_ you are making the change.
|
||||
You can include a comparison of the previous behavior with the new behavior in order to illustrate the impact of the change.
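
For example, the body of a `fix(release)` sample commit from an earlier revision of these guidelines explained the motivation like this:

```
The version in our package.json gets copied to the one we publish, and users
need the latest of these.
```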
|
||||
|
||||
|
||||
#### Commit Message Footer
|
||||
|
||||
The footer can contain information about breaking changes and is also the place to reference GitHub issues, Jira tickets, and other PRs that this commit closes or is related to.
|
||||
|
||||
```
|
||||
BREAKING CHANGE: <breaking change summary>
|
||||
<BLANK LINE>
|
||||
<breaking change description + migration instructions>
|
||||
<BLANK LINE>
|
||||
<BLANK LINE>
|
||||
Fixes #<issue number>
|
||||
```
|
||||
|
||||
The Breaking Change section should start with the phrase "BREAKING CHANGE: ", followed by a summary of the breaking change, a blank line, and a detailed description of the breaking change that also includes migration instructions.
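
For instance, a complete footer might look like the sketch below; the option name and issue number are invented purely for illustration:

```
BREAKING CHANGE: the `strictChecks` option is now enabled by default

Projects that relied on the previous default must set `strictChecks: false`
explicitly in their configuration until they are ready to migrate.

Fixes #12345
```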
|
||||
|
||||
|
||||
### Revert commits
|
||||
|
||||
If the commit reverts a previous commit, it should begin with `revert: `, followed by the header of the reverted commit.
|
||||
|
||||
The content of the commit message body should contain:
|
||||
|
||||
- information about the SHA of the commit being reverted in the following format: `This reverts commit <SHA>`,
- a clear description of the reason for reverting the commit, as shown in the example below.
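
A minimal sketch of such a revert commit (the reverted header reuses the sample above; the SHA placeholder and the stated reason are illustrative):

```
revert: docs(changelog): update change log to beta.5

This reverts commit <SHA>.

The changelog entry was published ahead of the actual release and will be
re-added once the release ships.
```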
|
||||
|
||||
|
||||
## <a name="cla"></a> Signing the CLA
|
||||
|
||||
Please sign our Contributor License Agreement (CLA) before sending pull requests. For any code
|
||||
changes to be accepted, the CLA must be signed. It's a quick process, we promise!
|
||||
|
||||
* For individuals, we have a [simple click-through form][individual-cla].
|
||||
* For corporations, we'll need you to
|
||||
[print, sign and one of scan+email, fax or mail the form][corporate-cla].
|
||||
|
||||
If you have more than one GitHub account, or multiple email addresses associated with a single GitHub account, you must sign the CLA using the primary email address of the GitHub account used to author Git commits and send pull requests.
|
||||
|
||||
The following documents can help you sort out issues with GitHub accounts and multiple email addresses:
|
||||
|
||||
* https://help.github.com/articles/setting-your-commit-email-address-in-git/
|
||||
* https://stackoverflow.com/questions/37245303/what-does-usera-committed-with-userb-13-days-ago-on-github-mean
|
||||
* https://help.github.com/articles/about-commit-email-addresses/
|
||||
* https://help.github.com/articles/blocking-command-line-pushes-that-expose-your-personal-email-address/
|
||||
|
||||
|
||||
|
||||
|
||||
[angular-group]: https://groups.google.com/forum/#!forum/angular
|
||||
[coc]: https://github.com/angular/code-of-conduct/blob/master/CODE_OF_CONDUCT.md
|
||||
[commit-message-format]: https://docs.google.com/document/d/1QrDFcIiPjSLDn3EL15IJygNPiHORgU1_OOAqWjiDU5Y/edit#
|
||||
[corporate-cla]: http://code.google.com/legal/corporate-cla-v1.0.html
|
||||
[dev-doc]: https://github.com/angular/angular/blob/master/docs/DEVELOPER.md
|
||||
[github]: https://github.com/angular/angular
|
||||
[gitter]: https://gitter.im/angular/angular
|
||||
[individual-cla]: http://code.google.com/legal/individual-cla-v1.0.html
|
||||
[js-style-guide]: https://google.github.io/styleguide/jsguide.html
|
||||
[jsfiddle]: http://jsfiddle.net
|
||||
[plunker]: http://plnkr.co/edit
|
||||
[runnable]: http://runnable.com
|
||||
[stackoverflow]: http://stackoverflow.com/questions/tagged/angular
|
438
CONTRIBUTING.md
@ -1,340 +1,276 @@
|
||||
# Contribuye a Angular
|
||||
# Contributing to Angular
|
||||
|
||||
¡Nos encantaría que contribuyeras a Angular y que ayudaras a hacerlo aún mejor de lo que es hoy!
|
||||
Como colaborador, estos son los lineamientos que nos gustaría que siguieras:
|
||||
We would love for you to contribute to Angular and help make it even better than it is
|
||||
today! As a contributor, here are the guidelines we would like you to follow:
|
||||
|
||||
- [Código de conducta](#coc)
|
||||
- [¿Preguntas o problemas?](#question)
|
||||
- [_Issues_ y _bugs_](#issue)
|
||||
- [Solicitud de funcionalidades](#feature)
|
||||
- [Guía para la creación de issues y PRs](#submit)
|
||||
- [Reglas del código](#rules)
|
||||
- [Convención para el mensaje de los _commits_](#commit)
|
||||
- [Firma del Acuerdo de Licencia de Colaborador (CLA)](#cla)
|
||||
- [Code of Conduct](#coc)
|
||||
- [Question or Problem?](#question)
|
||||
- [Issues and Bugs](#issue)
|
||||
- [Feature Requests](#feature)
|
||||
- [Submission Guidelines](#submit)
|
||||
- [Coding Rules](#rules)
|
||||
- [Commit Message Guidelines](#commit)
|
||||
- [Signing the CLA](#cla)
|
||||
|
||||
## <a name="coc"></a> Code of Conduct
|
||||
Help us keep Angular open and inclusive. Please read and follow our [Code of Conduct][coc].
|
||||
|
||||
## <a name="question"></a> Got a Question or Problem?
|
||||
|
||||
Do not open issues for general support questions as we want to keep GitHub issues for bug reports and feature requests. You've got much better chances of getting your question answered on [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) where the questions should be tagged with tag `angular`.
|
||||
|
||||
Stack Overflow is a much better place to ask questions since:
|
||||
|
||||
- there are thousands of people willing to help on Stack Overflow
|
||||
- questions and answers stay available for public viewing so your question / answer might help someone else
|
||||
- Stack Overflow's voting system assures that the best answers are prominently visible.
|
||||
|
||||
To save your and our time, we will systematically close all issues that are requests for general support and redirect people to Stack Overflow.
|
||||
|
||||
If you would like to chat about the question in real-time, you can reach out via [our gitter channel][gitter].
|
||||
|
||||
## <a name="issue"></a> Found a Bug?
|
||||
If you find a bug in the source code, you can help us by
|
||||
[submitting an issue](#submit-issue) to our [GitHub Repository][github]. Even better, you can
|
||||
[submit a Pull Request](#submit-pr) with a fix.
|
||||
|
||||
## <a name="feature"></a> Missing a Feature?
|
||||
You can *request* a new feature by [submitting an issue](#submit-issue) to our GitHub
|
||||
Repository. If you would like to *implement* a new feature, please submit an issue with
|
||||
a proposal for your work first, to be sure that we can use it.
|
||||
Please consider what kind of change it is:
|
||||
|
||||
* For a **Major Feature**, first open an issue and outline your proposal so that it can be
|
||||
discussed. This will also allow us to better coordinate our efforts, prevent duplication of work,
|
||||
and help you to craft the change so that it is successfully accepted into the project.
|
||||
* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
|
||||
|
||||
## <a name="submit"></a> Submission Guidelines
|
||||
|
||||
### <a name="submit-issue"></a> Submitting an Issue
|
||||
|
||||
Before you submit an issue, please search the issue tracker, maybe an issue for your problem already exists and the discussion might inform you of workarounds readily available.
|
||||
|
||||
We want to fix all the issues as soon as possible, but before fixing a bug we need to reproduce and confirm it. In order to reproduce bugs we will systematically ask you to provide a minimal reproduction scenario using http://plnkr.co. Having a live, reproducible scenario gives us wealth of important information without going back & forth to you with additional questions like:
|
||||
|
||||
- version of Angular used
|
||||
- 3rd-party libraries and their versions
|
||||
- and most importantly - a use-case that fails
|
||||
|
||||
A minimal reproduce scenario using http://plnkr.co/ allows us to quickly confirm a bug (or point out coding problem) as well as confirm that we are fixing the right problem. If plunker is not a suitable way to demonstrate the problem (for example for issues related to our npm packaging), please create a standalone git repository demonstrating the problem.
|
||||
|
||||
We will be insisting on a minimal reproduce scenario in order to save maintainers time and ultimately be able to fix more bugs. Interestingly, from our experience users often find coding problems themselves while preparing a minimal plunk. We understand that sometimes it might be hard to extract essentials bits of code from a larger code-base but we really need to isolate the problem before we can fix it.
|
||||
|
||||
Unfortunately, we are not able to investigate / fix bugs without a minimal reproduction, so if we don't hear back from you we are going to close an issue that doesn't have enough info to be reproduced.
|
||||
|
||||
You can file new issues by filling out our [new issue form](https://github.com/angular/angular/issues/new).
|
||||
|
||||
|
||||
## <a name="coc"></a> Código de conducta
|
||||
### <a name="submit-pr"></a> Submitting a Pull Request (PR)
|
||||
Before you submit your Pull Request (PR) consider the following guidelines:
|
||||
|
||||
Ayúdanos a mantener Angular abierto e inclusivo.
|
||||
Por favor lee y sigue nuestro [Código de conducta][coc].
|
||||
|
||||
|
||||
## <a name="question"></a> ¿Tienes alguna pregunta o problema?
|
||||
|
||||
No abras *issues* para preguntas de soporte general ya que queremos mantener los *issues* de GitHub para reporte de *bugs* y solicitud de funcionalidades.
|
||||
En su lugar, recomendamos utilizar [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) para hacer preguntas relacionadas con soporte. Al crear una nueva pregunta en Stack Overflow, asegúrate de agregar la etiqueta (tag) `angular`.
|
||||
|
||||
Stack Overflow es mucho mejor para hacer preguntas ya que:
|
||||
|
||||
- Hay miles de personas dispuestas a ayudar en preguntas y respuestas de Stack Overflow
|
||||
que permanecen disponibles para el público, por lo que tu pregunta o respuesta podría ayudar a otra persona.
|
||||
- El sistema de votación de Stack Overflow asegura que las mejores respuestas sobresalgan y sean visibles.
|
||||
|
||||
Para ahorrar tu tiempo y el nuestro, cerraremos sistemáticamente todos los *issues* que sean solicitudes de soporte general y redirigiremos a las personas a Stack Overflow.
|
||||
|
||||
Si deseas chatear sobre alguna pregunta en tiempo real, puedes hacerlo a través de nuestro [canal de Gitter][gitter].
|
||||
|
||||
|
||||
## <a name="issue"></a> ¿Encontraste un Bug?
|
||||
|
||||
Si encontraste un error en el código fuente, puedes ayudarnos [creando un *issue*](#submit-issue) en nuestro [repositorio de GitHub][github].
|
||||
O incluso mejor, puedes [crear un *Pull Request*](#submit-pr) con la solución.
|
||||
|
||||
|
||||
## <a name="feature"></a> ¿Falta alguna funcionalidad?
|
||||
Puedes solicitar una nueva funcionalidad [creando un *issue*](#submit-issue) en nuestro repositorio de GitHub.
|
||||
Si deseas implementar una nueva funcionalidad, por favor considera el tamaño del cambio para determinar los pasos correctos para continuar:
|
||||
|
||||
* Para un **cambio significativo**, primero abre un *issue* y describe tu propuesta para que pueda ser discutida.
|
||||
Este proceso nos permite coordinar mejor nuestros esfuerzos, evitar trabajo duplicado y ayudarte a diseñar el cambio para que sea aceptado con éxito en el proyecto.
|
||||
|
||||
**Nota**: Agregar un nuevo tema a la documentación o reescribir significativamente un tema, también cuenta como *cambio significativo*.
|
||||
|
||||
* **Cambios pequeños** pueden ser elaborados y directamente [creados como un _pull request_](#submit-pr).
|
||||
|
||||
|
||||
## <a name="submit"></a> Guía para la creación de issues y PRs
|
||||
|
||||
|
||||
### <a name="submit-issue"></a> Creación de _issues_
|
||||
|
||||
Antes de crear un *issue*, por favor busca en el *issue tracker*; quizá ya exista un *issue* para tu problema y la discusión pueda informarte sobre soluciones alternativas disponibles.
|
||||
|
||||
Queremos solucionar todos los problemas lo antes posible, pero antes de corregir un bug necesitamos reproducirlo y confirmarlo.
|
||||
Para reproducir errores, requerimos que proporciones una reproducción mínima.
|
||||
Tener un escenario reproducible mínimo nos brinda una gran cantidad de información importante sin tener que ir y venir con preguntas adicionales.
|
||||
|
||||
Una reproducción mínima nos permite confirmar rápidamente un bug (o señalar un problema de código), así también confirmar que estamos solucionando el problema correcto.
|
||||
|
||||
Requerimos una reproducción mínima para ahorrar tiempo a los encargados del mantenimiento y en última instancia, poder corregir más bugs.
|
||||
A menudo los desarrolladores encuentran problemas de código mientras preparan una reproducción mínima.
|
||||
Entendemos que a veces puede ser difícil extraer porciones esenciales de código de un código más grande, pero realmente necesitamos aislar el problema antes de poder solucionarlo.
|
||||
|
||||
Desafortunadamente no podemos investigar/corregir errores sin una reproducción mínima, por lo que si no tenemos tu retroalimentación del bug, vamos a cerrar el *issue* ya que no tiene suficiente información para reproducirse.
|
||||
|
||||
Puedes crear nuevos *issues* seleccionando una de nuestras [plantillas de _issues_](https://github.com/angular/angular/issues/new/choose) y completando la plantilla.
|
||||
|
||||
|
||||
### <a name="submit-pr"></a> Creación de un Pull Requests (PR)
|
||||
|
||||
Antes de crear tu Pull Request (PR) considera los siguientes lineamientos:
|
||||
|
||||
1. Busca en [GitHub](https://github.com/angular/angular/pulls) PRs que estén abiertos o cerrados y que estén relacionados con el que vas a crear.
|
||||
No deseas duplicar los esfuerzos existentes.
|
||||
|
||||
2. Asegúrate de que el PR describa el problema que estás solucionando o que documente el diseño de la funcionalidad que deseas agregar.
|
||||
Discutir el diseño por adelantado ayuda a garantizar que estemos listos para aceptar tu trabajo.
|
||||
|
||||
3. Por favor firma nuestro [Acuerdo de Licencia de Colaborador (CLA)](#cla) antes de crear PRs.
|
||||
No podemos aceptar el código sin el Acuerdo de Licencia de Colaborador (CLA) firmado.
|
||||
Asegúrate de crear todas las contribuciones de Git con la dirección de correo electrónico asociada con tu firma del Acuerdo de Licencia de Colaborador (CLA).
|
||||
|
||||
4. Haz *fork* del repositorio angular/angular.
|
||||
|
||||
5. Haz tus cambios en una nueva rama de Git:
|
||||
1. Search [GitHub](https://github.com/angular/angular/pulls) for an open or closed PR
|
||||
that relates to your submission. You don't want to duplicate effort.
|
||||
1. Please sign our [Contributor License Agreement (CLA)](#cla) before sending PRs.
|
||||
We cannot accept code without this. Make sure you sign with the primary email address of the Git identity that has been granted access to the Angular repository.
|
||||
1. Fork the angular/angular repo.
|
||||
1. Make your changes in a new git branch:
|
||||
|
||||
```shell
|
||||
git checkout -b my-fix-branch master
|
||||
```
|
||||
|
||||
6. Crea tu corrección, **incluyendo casos de prueba apropiados**.
|
||||
|
||||
7. Sigue nuestras [Reglas de código](#rules).
|
||||
|
||||
8. Ejecuta todo el conjunto de pruebas de Angular, tal como está descrito en la [documentación del desarrollador][dev-doc], y asegúrate de que todas las pruebas pasen.
|
||||
|
||||
9. Crea un commit de tus cambios utilizando un mensaje de commit descriptivo que siga nuestra [convención para el mensaje de los commits](#commit).
|
||||
Es necesario cumplir con estas convenciones porque las notas de las versiones se generan automáticamente a partir de estos mensajes.
|
||||
1. Create your patch, **including appropriate test cases**.
|
||||
1. Follow our [Coding Rules](#rules).
|
||||
1. Run the full Angular test suite, as described in the [developer documentation][dev-doc],
|
||||
and ensure that all tests pass.
|
||||
1. Commit your changes using a descriptive commit message that follows our
|
||||
[commit message conventions](#commit). Adherence to these conventions
|
||||
is necessary because release notes are automatically generated from these messages.
|
||||
|
||||
```shell
|
||||
git commit -a
|
||||
```
|
||||
Nota: la opción de línea de comandos `-a` de Git automáticamente hará "add" y "rm" de los archivos editados.
|
||||
Note: the optional commit `-a` command line option will automatically "add" and "rm" edited files.
|
||||
|
||||
10. Haz push de tu rama a GitHub:
|
||||
1. Push your branch to GitHub:
|
||||
|
||||
```shell
|
||||
git push origin my-fix-branch
|
||||
```
|
||||
|
||||
11. En GitHub, crea un pull request a `angular:master`.
|
||||
1. In GitHub, send a pull request to `angular:master`.
|
||||
* If we suggest changes then:
|
||||
* Make the required updates.
|
||||
* Re-run the Angular test suites to ensure tests are still passing.
|
||||
* Rebase your branch and force push to your GitHub repository (this will update your Pull Request):
|
||||
|
||||
Si solicitamos cambios a través de revisiones de código, sigue las siguientes indicaciones:
|
||||
```shell
|
||||
git rebase master -i
|
||||
git push -f
|
||||
```
|
||||
|
||||
* Haz los cambios requeridos.
|
||||
* Ejecuta nuevamente el conjunto de pruebas de Angular para asegurar que todas las pruebas aún están pasando.
|
||||
* Haz rebase de tu rama a la rama master y haz push con la opción `-f` a tu repositorio de Github (esto actualizará tu Pull Request):
|
||||
That's it! Thank you for your contribution!
|
||||
|
||||
```shell
|
||||
git rebase master -i
|
||||
git push -f
|
||||
```
|
||||
#### After your pull request is merged
|
||||
|
||||
¡Es todo! ¡Muchas gracias por tu contribución!
|
||||
After your pull request is merged, you can safely delete your branch and pull the changes
|
||||
from the main (upstream) repository:
|
||||
|
||||
|
||||
#### Después del merge de tu pull request
|
||||
|
||||
Después de que se hizo merge de tu pull request, puedes eliminar de forma segura tu rama y hacer pull de los cambios del repositorio principal (upstream):
|
||||
|
||||
* Elimina la rama remota en GitHub a través de la interfaz de usuario web de GitHub o en tu línea de comandos de la siguiente manera:
|
||||
* Delete the remote branch on GitHub either through the GitHub web UI or your local shell as follows:
|
||||
|
||||
```shell
|
||||
git push origin --delete my-fix-branch
|
||||
```
|
||||
|
||||
* Muévete a la rama master:
|
||||
* Check out the master branch:
|
||||
|
||||
```shell
|
||||
git checkout master -f
|
||||
```
|
||||
|
||||
* Elimina tu rama local:
|
||||
* Delete the local branch:
|
||||
|
||||
```shell
|
||||
git branch -D my-fix-branch
|
||||
```
|
||||
|
||||
* Actualiza tu rama master con la última versión del repositorio principal (upstream):
|
||||
* Update your master with the latest upstream version:
|
||||
|
||||
```shell
|
||||
git pull --ff upstream master
|
||||
```
|
||||
|
||||
## <a name="rules"></a> Coding Rules
|
||||
To ensure consistency throughout the source code, keep these rules in mind as you are working:
|
||||
|
||||
## <a name="rules"></a> Reglas del código
|
||||
Para garantizar la coherencia en todo el código fuente, ten en cuenta estas reglas mientras trabajas:
|
||||
* All features or bug fixes **must be tested** by one or more specs (unit-tests).
|
||||
* All public API methods **must be documented**. (Details TBC).
|
||||
* We follow [Google's JavaScript Style Guide][js-style-guide], but wrap all code at
|
||||
**100 characters**. An automated formatter is available, see
|
||||
[DEVELOPER.md](docs/DEVELOPER.md#clang-format).
|
||||
|
||||
* Todas las funcionalidades o solución de bugs **deben ser probadas** por una o más pruebas (pruebas unitarias).
|
||||
* Todos los métodos públicos del API **deben ser documentados**.
|
||||
* Seguimos la [guía de estilo JavaScript de Google][js-style-guide], pero cada línea no debe exceder **100 caracteres**.
|
||||
## <a name="commit"></a> Commit Message Guidelines
|
||||
|
||||
Un formateador automatizado está disponible, revisar [DEVELOPER.md](docs/DEVELOPER.md#clang-format).
|
||||
|
||||
|
||||
## <a name="commit"></a> Formato para el mensaje de los commits
|
||||
|
||||
*Esta especificación está inspirada y reemplaza el [Formato de mensaje de commits de AngularJS][commit-message-format].*
|
||||
|
||||
Tenemos reglas muy precisas sobre cómo deben formatearse nuestros mensajes de los commits de Git.
|
||||
Este formato permite tener **un historial de commits más fácil de leer**.
|
||||
|
||||
Cada mensaje de un commit consta del **header**, el **body**, y el **footer**.
|
||||
We have very precise rules over how our git commit messages can be formatted. This leads to **more
|
||||
readable messages** that are easy to follow when looking through the **project history**. But also,
|
||||
we use the git commit messages to **generate the Angular change log**.
|
||||
|
||||
### Commit Message Format
|
||||
Each commit message consists of a **header**, a **body** and a **footer**. The header has a special
|
||||
format that includes a **type**, a **scope** and a **subject**:
|
||||
|
||||
```
|
||||
<header>
|
||||
<LINEA VACIA>
|
||||
<type>(<scope>): <subject>
|
||||
<BLANK LINE>
|
||||
<body>
|
||||
<LINEA VACIA>
|
||||
<BLANK LINE>
|
||||
<footer>
|
||||
```
|
||||
|
||||
El `header` es obligatorio y debe ajustarse al formato del [mensaje del header del commit](#commit-header).
|
||||
The **header** is mandatory and the **scope** of the header is optional.
|
||||
|
||||
El `body` es obligatorio para todos los commits excepto los que tenga scope "docs".
|
||||
Cuando el body es requerido debe tener al menos 20 caracteres.
|
||||
Any line of the commit message cannot be longer than 100 characters! This allows the message to be easier
|
||||
to read on GitHub as well as in various git tools.
|
||||
|
||||
El `footer` es opcional.
|
||||
Footer should contain a [closing reference to an issue](https://help.github.com/articles/closing-issues-via-commit-messages/) if any.
|
||||
|
||||
Cualquier línea del mensaje del commit no puede tener más de 100 caracteres.
|
||||
|
||||
|
||||
#### <a href="commit-header"></a>Mensaje del header del commit
|
||||
Samples: (even more [samples](https://github.com/angular/angular/commits/master))
|
||||
|
||||
```
|
||||
<tipo>(<alcance>): <resumen>
|
||||
│ │ │
|
||||
│ │ └─⫸ Resumen corto escrito en modo imperativo, tiempo presente. Sin mayúsculas. Sin punto final.
|
||||
│ │
|
||||
│ └─⫸ Alcance del commit: animations|bazel|benchpress|common|compiler|compiler-cli|core|
|
||||
│ elements|forms|http|language-service|localize|platform-browser|
|
||||
│ platform-browser-dynamic|platform-server|platform-webworker|
|
||||
│ platform-webworker-dynamic|router|service-worker|upgrade|zone.js|
|
||||
│ packaging|changelog|dev-infra|docs-infra|migrations|ngcc|ve
|
||||
│
|
||||
└─⫸ Tipo de commit: build|ci|docs|feat|fix|perf|refactor|style|test
|
||||
docs(changelog): update change log to beta.5
|
||||
```
|
||||
```
|
||||
fix(release): need to depend on latest rxjs and zone.js
|
||||
|
||||
The version in our package.json gets copied to the one we publish, and users need the latest of these.
|
||||
```
|
||||
|
||||
El `<tipo>` y `<resumen>` son obligatorios, el `(<alcance>)` es opcional.
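
Por ejemplo, un header que sigue este formato podría verse como la siguiente muestra (ejemplo ilustrativo, reutilizado de las muestras de una revisión anterior de esta guía):

```
docs(changelog): update change log to beta.5
```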
|
||||
### Revert
|
||||
If the commit reverts a previous commit, it should begin with `revert: `, followed by the header of the reverted commit. In the body it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit being reverted.
|
||||
|
||||
### Type
|
||||
Must be one of the following:
|
||||
|
||||
##### Tipo
|
||||
* **build**: Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)
|
||||
* **ci**: Changes to our CI configuration files and scripts (example scopes: Travis, Circle, BrowserStack, SauceLabs)
|
||||
* **docs**: Documentation only changes
|
||||
* **feat**: A new feature
|
||||
* **fix**: A bug fix
|
||||
* **perf**: A code change that improves performance
|
||||
* **refactor**: A code change that neither fixes a bug nor adds a feature
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
* **test**: Adding missing tests or correcting existing tests
|
||||
|
||||
El tipo debe ser uno de los siguientes:
|
||||
### Scope
|
||||
The scope should be the name of the npm package affected (as perceived by the person reading the changelog generated from commit messages).
|
||||
|
||||
* **build**: cambios que afectan el sistema de compilación o dependencias externas (ejemplos de scopes: gulp, broccoli, npm)
|
||||
* **ci**: cambios en nuestros archivos y scripts de configuración de CI (ejemplos de scopes: Circle, BrowserStack, SauceLabs)
|
||||
* **docs**: cambios en la documentación
|
||||
* **feat**: una nueva funcionalidad
|
||||
* **fix**: una solución de un bug
|
||||
* **perf**: un cambio de código que mejora el rendimiento.
|
||||
* **refactor**: un cambio de código que no corrige ningún error ni agrega ninguna funcionalidad
|
||||
* **style**: cambios que no afectan el significado del código (espacios en blanco, formato, falta de punto y coma, etc.)
|
||||
* **test**: se agregan pruebas faltantes o se corrigen pruebas existentes
|
||||
The following is the list of supported scopes:
|
||||
|
||||
* **animations**
|
||||
* **common**
|
||||
* **compiler**
|
||||
* **compiler-cli**
|
||||
* **core**
|
||||
* **forms**
|
||||
* **http**
|
||||
* **language-service**
|
||||
* **platform-browser**
|
||||
* **platform-browser-dynamic**
|
||||
* **platform-server**
|
||||
* **platform-webworker**
|
||||
* **platform-webworker-dynamic**
|
||||
* **router**
|
||||
* **service-worker**
|
||||
* **upgrade**
|
||||
|
||||
##### Alcance
|
||||
El alcance debe ser el nombre del paquete npm afectado (tal como lo percibe la persona que lee el registro de cambios generado a partir de los mensajes de commit).
|
||||
There are currently a few exceptions to the "use package name" rule:
|
||||
|
||||
La siguiente es la lista de alcances permitidos:
|
||||
* **packaging**: used for changes that change the npm package layout in all of our packages, e.g. public path changes, package.json changes done to all packages, d.ts file/format changes, changes to bundles, etc.
|
||||
* **changelog**: used for updating the release notes in CHANGELOG.md
|
||||
* **aio**: used for docs-app (angular.io) related changes within the /aio directory of the repo
|
||||
* none/empty string: useful for `style`, `test` and `refactor` changes that are done across all packages (e.g. `style: add missing semicolons`)
|
||||
|
||||
* `animations`
|
||||
* `bazel`
|
||||
* `benchpress`
|
||||
* `common`
|
||||
* `compiler`
|
||||
* `compiler-cli`
|
||||
* `core`
|
||||
* `elements`
|
||||
* `forms`
|
||||
* `http`
|
||||
* `language-service`
|
||||
* `localize`
|
||||
* `platform-browser`
|
||||
* `platform-browser-dynamic`
|
||||
* `platform-server`
|
||||
* `platform-webworker`
|
||||
* `platform-webworker-dynamic`
|
||||
* `router`
|
||||
* `service-worker`
|
||||
* `upgrade`
|
||||
* `zone.js`
|
||||
### Subject
|
||||
The subject contains succinct description of the change:
|
||||
|
||||
Actualmente hay algunas excepciones a la regla "usar el nombre de paquete":
|
||||
* use the imperative, present tense: "change" not "changed" nor "changes"
|
||||
* don't capitalize first letter
|
||||
* no dot (.) at the end
|
||||
|
||||
* `packaging`: usado para cambios que modifican el diseño de los paquetes de npm en todos nuestros paquetes. Ejemplos: cambios de la ruta pública, cambios de package.json hechos a todos los paquetes, cambios a archivos o formatos d.ts, cambios a bundles, etc.
|
||||
### Body
|
||||
Just as in the **subject**, use the imperative, present tense: "change" not "changed" nor "changes".
|
||||
The body should include the motivation for the change and contrast this with previous behavior.
|
||||
|
||||
* `changelog`: utilizado para actualizar las notas de la versión en CHANGELOG.md
|
||||
### Footer
|
||||
The footer should contain any information about **Breaking Changes** and is also the place to
|
||||
reference GitHub issues that this commit **Closes**.
|
||||
|
||||
* `dev-infra`: utilizado para cambios relacionados con dev-infra dentro de los directorios /scripts, /tools y /dev-infra
|
||||
**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this.
|
||||
|
||||
* `docs-infra`: utilizado para cambios relacionados con la documentación (angular.io) dentro del directorio /aio del repositorio
|
||||
A detailed explanation can be found in this [document][commit-message-format].
|
||||
|
||||
* `migrations`: utilizado para los cambios en las migraciones `ng update`.
|
||||
## <a name="cla"></a> Signing the CLA
|
||||
|
||||
* `ngcc`: usado para los cambios del [Compilador de compatibilidad de Angular](./packages/compiler-cli/ngcc/README.md)
|
||||
Please sign our Contributor License Agreement (CLA) before sending pull requests. For any code
|
||||
changes to be accepted, the CLA must be signed. It's a quick process, we promise!
|
||||
|
||||
* `ve`: utilizado para cambios específicos de ViewEngine (legacy compiler/renderer).
|
||||
* For individuals we have a [simple click-through form][individual-cla].
|
||||
* For corporations we'll need you to
|
||||
[print, sign and one of scan+email, fax or mail the form][corporate-cla].
|
||||
|
||||
* alcance vacío: útil para cambios de `style`, `test` y `refactor` que se realizan en todos los paquetes (ejemplo: `style: add missing semicolons`) y para cambios de la documentación que no están relacionados a un paquete en específico (ejemplo: `docs: corrige error gramatical en el tutorial`).
|
||||
<hr>
|
||||
|
||||
|
||||
##### Resumen
|
||||
|
||||
Usa el campo resumen para proporcionar una descripción breve del cambio:
|
||||
|
||||
* usa el modo imperativo, tiempo presente: "cambia" no "cambió" o "cambios"
|
||||
* no debe comenzar con letra mayúscula
* no debe llevar punto (.) al final
|
||||
|
||||
|
||||
#### Mensaje del cuerpo del commit
|
||||
|
||||
Tal como en el resumen, usa el modo imperativo, tiempo presente: "cambia" no "cambió" o "cambios".
|
||||
|
||||
Explica la razón del cambio en el mensaje del cuerpo del commit. Este mensaje debe explicar _por qué_ estás realizando el cambio.
|
||||
Puedes incluir una comparación del comportamiento anterior con el nuevo comportamiento para ilustrar el impacto del cambio.
|
||||
|
||||
|
||||
#### Mensaje del footer del commit
|
||||
|
||||
El footer puede contener información sobre cambios significativos y también es el lugar para hacer referencia a issues de GitHub, tickets de Jira y otros PRs que están relacionados con el commit.
|
||||
|
||||
```
|
||||
CAMBIO SIGNIFICATIVO: <resumen del cambio significativo>
|
||||
<LINEA VACIA>
|
||||
<descripción del cambio significativo + instrucciones para la migración>
|
||||
<LINEA VACIA>
|
||||
<LINEA VACIA>
|
||||
Fix #<issue número>
|
||||
```
|
||||
|
||||
La sección de cambios significativos debería comenzar con la frase "CAMBIO SIGNIFICATIVO: " seguido de un resumen del cambio significativo, una línea en blanco y una descripción detallada del cambio significativo que también incluya instrucciones de migración.
|
||||
|
||||
|
||||
### Revirtiendo commits
|
||||
|
||||
Si el commit revierte un commit previo, el commit debería comenzar con `revert: `, seguido por el header del commit revertido.
|
||||
|
||||
El contenido del mensaje del commit debería contener:
|
||||
|
||||
- Información sobre el SHA del commit que se revierte en el siguiente formato: `Esto revierte el commit <SHA>`,
- Una descripción clara de la razón para revertir el _commit_, como se muestra en el ejemplo a continuación.
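
Un boceto mínimo de un commit de reversión (el header reutiliza la muestra anterior; el marcador `<SHA>` y la razón indicada son ilustrativos):

```
revert: docs(changelog): update change log to beta.5

Esto revierte el commit <SHA>.

La entrada del changelog se publicó antes del lanzamiento real y se volverá a
agregar cuando el lanzamiento esté disponible.
```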
|
||||
|
||||
|
||||
## <a name="cla"></a> Firma del Acuerdo de Licencia de Colaborador (CLA)
|
||||
|
||||
Por favor firma nuestro Acuerdo de Licencia de Colaborador (CLA) cuando creas tu primer pull request. Para que cualquier cambio de código sea aceptado, el Acuerdo de Licencia de Colaborador (CLA) debe ser firmado. Es un proceso rápido con nuestro CLA assistant que está integrado con nuestro CI.
|
||||
|
||||
Los siguientes documentos pueden ayudarte a resolver problemas con cuentas de GitHub y múltiples direcciones de correo electrónico:
|
||||
If you have more than one Git identity, you must make sure that you sign the CLA using the primary email address associated with the ID that has been granted access to the Angular repository. Git identities can be associated with more than one email address, and only one is primary. Here are some links to help you sort out multiple Git identities and email addresses:
|
||||
|
||||
* https://help.github.com/articles/setting-your-commit-email-address-in-git/
|
||||
* https://stackoverflow.com/questions/37245303/what-does-usera-committed-with-userb-13-days-ago-on-github-mean
|
||||
* https://help.github.com/articles/about-commit-email-addresses/
|
||||
* https://help.github.com/articles/blocking-command-line-pushes-that-expose-your-personal-email-address/
|
||||
|
||||
Note that if you have more than one Git identity, it is important to verify that you are logged in with the same ID with which you signed the CLA, before you commit changes. If not, your PR will fail the CLA check.
|
||||
|
||||
<hr>
|
||||
|
||||
|
||||
[angular-group]: https://groups.google.com/forum/#!forum/angular
|
||||
|
2
LICENSE
@ -1,6 +1,6 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2010-2020 Google LLC. http://angular.io/license
|
||||
Copyright (c) 2014-2018 Google, Inc. http://angular.io
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
26
README.en.md
@ -1,26 +0,0 @@
|
||||
[](https://circleci.com/gh/angular/workflows/angular/tree/master)
|
||||
[](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://www.npmjs.com/@angular/core)
|
||||
|
||||
|
||||
# Angular
|
||||
|
||||
Angular is a development platform for building mobile and desktop web applications using TypeScript/JavaScript and other languages.
|
||||
|
||||
## Quickstart
|
||||
|
||||
[Get started in 5 minutes][quickstart].
|
||||
|
||||
## Changelog
|
||||
|
||||
[Learn about the latest improvements][changelog].
|
||||
|
||||
## Want to help?
|
||||
|
||||
Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our
|
||||
guidelines for [contributing][contributing] and then check out one of our issues in the [hotlist: community-help](https://github.com/angular/angular/labels/hotlist%3A%20community-help).
|
||||
|
||||
[contributing]: https://github.com/angular/angular/blob/master/CONTRIBUTING.md
|
||||
[quickstart]: https://angular.io/start
|
||||
[changelog]: https://github.com/angular/angular/blob/master/CHANGELOG.md
|
||||
[ng]: https://angular.io
|
33
README.md
@ -1,25 +1,28 @@
|
||||
# Angular en español
|
||||
[](https://travis-ci.org/angular/angular)
|
||||
[](https://circleci.com/gh/angular/angular/tree/master)
|
||||
[](https://www.browserstack.com/automate/public-build/LzF3RzBVVGt6VWE2S0hHaC9uYllOZz09LS1BVjNTclBKV0x4eVRlcjA4QVY1M0N3PT0=--eb4ce8c8dc2c1c5b2b5352d473ee12a73ac20e06)
|
||||
[](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://www.npmjs.com/@angular/core)
|
||||
|
||||
Angular es una plataforma de desarrollo para construir aplicaciones web y móviles que usa
|
||||
TypeScript/JavaScript y otros lenguajes de programación.
|
||||
|
||||
## ¿Quieres ayudar?
|
||||
[](https://saucelabs.com/u/angular2-ci)
|
||||
|
||||
### Documentación en español
|
||||
*Safari (7+), iOS (7+) and IE mobile (11) are tested on [BrowserStack][browserstack].*
|
||||
|
||||
¿Quieres mejorar la documentación? ¡Excelente! Lee nuestras pautas para
|
||||
[colaborar](CONTRIBUTING.md) y luego revisa algunos de nuestras
|
||||
[issues](https://github.com/angular-hispano/angular/issues).
|
||||
# Angular
|
||||
|
||||
### El framework
|
||||
Angular is a development platform for building mobile and desktop web applications using TypeScript/JavaScript and other languages.
|
||||
|
||||
La colaboración para corregir errores y agregar funciones en el framework debe realizarse en inglés a través
|
||||
del repositorio [angular/angular](https://github.com/angular/angular) upstream.
|
||||
## Quickstart
|
||||
|
||||
## Guía rápida
|
||||
[Get started in 5 minutes][quickstart].
|
||||
|
||||
[Comienza a usarlo en 5 minutos](https://docs.angular.lat/start).
|
||||
## Want to help?
|
||||
|
||||
## Registro de cambios (Changelog)
|
||||
Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our
|
||||
guidelines for [contributing][contributing] and then check out one of our issues in the [hotlist: community-help](https://github.com/angular/angular/labels/hotlist%3A%20community-help).
|
||||
|
||||
[Últimas mejoras realizadas](CHANGELOG.md).
|
||||
[browserstack]: https://www.browserstack.com/automate/public-build/LzF3RzBVVGt6VWE2S0hHaC9uYllOZz09LS1BVjNTclBKV0x4eVRlcjA4QVY1M0N3PT0=--eb4ce8c8dc2c1c5b2b5352d473ee12a73ac20e06
|
||||
[contributing]: http://github.com/angular/angular/blob/master/CONTRIBUTING.md
|
||||
[quickstart]: https://angular.io/docs/ts/latest/quickstart.html
|
||||
[ng]: http://angular.io
|
||||
|
140
WORKSPACE
@ -1,108 +1,58 @@
|
||||
workspace(
|
||||
name = "angular",
|
||||
managed_directories = {"@npm": ["node_modules"]},
|
||||
)
|
||||
workspace(name = "angular")
|
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
|
||||
|
||||
# Fetch rules_nodejs so we can install our npm dependencies
|
||||
http_archive(
|
||||
git_repository(
|
||||
name = "build_bazel_rules_nodejs",
|
||||
sha256 = "84abf7ac4234a70924628baa9a73a5a5cbad944c4358cf9abdb4aab29c9a5b77",
|
||||
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.7.0/rules_nodejs-1.7.0.tar.gz"],
|
||||
remote = "https://github.com/bazelbuild/rules_nodejs.git",
|
||||
commit = "230d39a391226f51c03448f91eb61370e2e58c42",
|
||||
)
|
||||
|
||||
# Check the rules_nodejs version and download npm dependencies
|
||||
# Note: bazel (version 2 and after) will check the .bazelversion file so we don't need to
|
||||
# assert on that.
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "check_rules_nodejs_version", "node_repositories", "yarn_install")
|
||||
load("@build_bazel_rules_nodejs//:defs.bzl", "check_bazel_version", "node_repositories")
|
||||
|
||||
check_rules_nodejs_version(minimum_version_string = "1.7.0")
|
||||
check_bazel_version("0.9.0")
|
||||
node_repositories(package_json = ["//:package.json"])
|
||||
|
||||
# Setup the Node.js toolchain
|
||||
node_repositories(
|
||||
node_repositories = {
|
||||
"12.14.1-darwin_amd64": ("node-v12.14.1-darwin-x64.tar.gz", "node-v12.14.1-darwin-x64", "0be10a28737527a1e5e3784d3ad844d742fe8b0718acd701fd48f718fd3af78f"),
|
||||
"12.14.1-linux_amd64": ("node-v12.14.1-linux-x64.tar.xz", "node-v12.14.1-linux-x64", "07cfcaa0aa9d0fcb6e99725408d9e0b07be03b844701588e3ab5dbc395b98e1b"),
|
||||
"12.14.1-windows_amd64": ("node-v12.14.1-win-x64.zip", "node-v12.14.1-win-x64", "1f96ccce3ba045ecea3f458e189500adb90b8bc1a34de5d82fc10a5bf66ce7e3"),
|
||||
},
|
||||
node_version = "12.14.1",
|
||||
package_json = ["//:package.json"],
|
||||
git_repository(
|
||||
name = "build_bazel_rules_typescript",
|
||||
remote = "https://github.com/bazelbuild/rules_typescript.git",
|
||||
commit = "eb3244363e1cb265c84e723b347926f28c29aa35"
|
||||
)
|
||||
|
||||
load("//integration:angular_integration_test.bzl", "npm_package_archives")
|
||||
|
||||
yarn_install(
|
||||
name = "npm",
|
||||
manual_build_file_contents = npm_package_archives(),
|
||||
package_json = "//:package.json",
|
||||
yarn_lock = "//:yarn.lock",
|
||||
)
|
||||
|
||||
# Install all bazel dependencies of the @npm npm packages
|
||||
load("@npm//:install_bazel_dependencies.bzl", "install_bazel_dependencies")
|
||||
|
||||
install_bazel_dependencies()
|
||||
|
||||
# Load angular dependencies
|
||||
load("//packages/bazel:package.bzl", "rules_angular_dev_dependencies")
|
||||
|
||||
rules_angular_dev_dependencies()
|
||||
|
||||
# Load protractor dependencies
|
||||
load("@npm_bazel_protractor//:package.bzl", "npm_bazel_protractor_dependencies")
|
||||
|
||||
npm_bazel_protractor_dependencies()
|
||||
|
||||
# Load karma dependencies
|
||||
load("@npm_bazel_karma//:package.bzl", "npm_bazel_karma_dependencies")
|
||||
|
||||
npm_bazel_karma_dependencies()
|
||||
|
||||
# Setup the rules_webtesting toolchain
|
||||
load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories")
|
||||
|
||||
web_test_repositories()
|
||||
|
||||
load("//dev-infra/browsers:browser_repositories.bzl", "browser_repositories")
|
||||
|
||||
browser_repositories()
|
||||
|
||||
# Setup the rules_typescript toolchain
|
||||
load("@npm_bazel_typescript//:index.bzl", "ts_setup_workspace")
|
||||
load("@build_bazel_rules_typescript//:defs.bzl", "ts_setup_workspace")
|
||||
|
||||
ts_setup_workspace()
|
||||
|
||||
# Setup the rules_sass toolchain
|
||||
load("@io_bazel_rules_sass//sass:sass_repositories.bzl", "sass_repositories")
|
||||
|
||||
sass_repositories()
|
||||
|
||||
# Setup the skydoc toolchain
|
||||
load("@io_bazel_skydoc//skylark:skylark.bzl", "skydoc_repositories")
|
||||
|
||||
skydoc_repositories()
|
||||
|
||||
load("@bazel_toolchains//rules:environments.bzl", "clang_env")
|
||||
load("@bazel_toolchains//rules:rbe_repo.bzl", "rbe_autoconfig")
|
||||
|
||||
rbe_autoconfig(
|
||||
name = "rbe_ubuntu1604_angular",
|
||||
# Need to specify a base container digest in order to ensure that we can use the checked-in
|
||||
# platform configurations for the "ubuntu16_04" image. Otherwise the autoconfig rule would
|
||||
# need to pull the image and run it in order to determine the toolchain configuration. See:
|
||||
# https://github.com/bazelbuild/bazel-toolchains/blob/3.2.0/configs/ubuntu16_04_clang/versions.bzl
|
||||
base_container_digest = "sha256:5e750dd878df9fcf4e185c6f52b9826090f6e532b097f286913a428290622332",
|
||||
# Note that if you change the `digest`, you might also need to update the
|
||||
# `base_container_digest` to make sure marketplace.gcr.io/google/rbe-ubuntu16-04-webtest:<digest>
|
||||
# and marketplace.gcr.io/google/rbe-ubuntu16-04:<base_container_digest> have
|
||||
# the same Clang and JDK installed. Clang is needed because of the dependency on
|
||||
# @com_google_protobuf. Java is needed for the Bazel's test executor Java tool.
|
||||
digest = "sha256:f743114235a43355bf8324e2ba0fa6a597236fe06f7bc99aaa9ac703631c306b",
|
||||
env = clang_env(),
|
||||
registry = "marketplace.gcr.io",
|
||||
# We can't use the default "ubuntu16_04" RBE image provided by the autoconfig because we need
|
||||
# a specific Linux kernel that comes with "libx11" in order to run headless browser tests.
|
||||
repository = "google/rbe-ubuntu16-04-webtest",
|
||||
use_checked_in_confs = "Force",
|
||||
local_repository(
|
||||
name = "rxjs",
|
||||
path = "node_modules/rxjs/src",
|
||||
)
|
||||
|
||||
git_repository(
|
||||
name = "com_github_bazelbuild_buildtools",
|
||||
remote = "https://github.com/bazelbuild/buildtools.git",
|
||||
# Note, this commit matches the version of buildifier in angular/ngcontainer
|
||||
# If you change this, also check if it matches the version in the angular/ngcontainer
|
||||
# version in /.circleci/config.yml
|
||||
commit = "b3b620e8bcff18ed3378cd3f35ebeb7016d71f71",
|
||||
)
|
||||
|
||||
http_archive(
|
||||
name = "io_bazel_rules_go",
|
||||
url = "https://github.com/bazelbuild/rules_go/releases/download/0.7.1/rules_go-0.7.1.tar.gz",
|
||||
sha256 = "341d5eacef704415386974bc82a1783a8b7ffbff2ab6ba02375e1ca20d9b031c",
|
||||
)
|
||||
|
||||
load("@io_bazel_rules_go//go:def.bzl", "go_rules_dependencies", "go_register_toolchains")
|
||||
|
||||
go_rules_dependencies()
|
||||
|
||||
go_register_toolchains()
|
||||
|
||||
# Fetching the Bazel source code allows us to compile the Skylark linter
|
||||
http_archive(
|
||||
name = "io_bazel",
|
||||
url = "https://github.com/bazelbuild/bazel/archive/9755c72b48866ed034bd28aa033e9abd27431b1e.zip",
|
||||
strip_prefix = "bazel-9755c72b48866ed034bd28aa033e9abd27431b1e",
|
||||
sha256 = "5b8443fc3481b5fcd9e7f348e1dd93c1397f78b223623c39eb56494c55f41962",
|
||||
)
|
||||
|
70
aio/.angular-cli.json
Normal file
@ -0,0 +1,70 @@
|
||||
{
|
||||
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
|
||||
"project": {
|
||||
"name": "site"
|
||||
},
|
||||
"apps": [
|
||||
{
|
||||
"root": "src",
|
||||
"outDir": "dist",
|
||||
"assets": [
|
||||
"assets",
|
||||
"generated",
|
||||
"app/search/search-worker.js",
|
||||
"favicon.ico",
|
||||
"pwa-manifest.json",
|
||||
"google385281288605d160.html"
|
||||
],
|
||||
"index": "index.html",
|
||||
"main": "main.ts",
|
||||
"polyfills": "polyfills.ts",
|
||||
"test": "test.ts",
|
||||
"tsconfig": "tsconfig.app.json",
|
||||
"testTsconfig": "tsconfig.spec.json",
|
||||
"prefix": "aio",
|
||||
"serviceWorker": false,
|
||||
"styles": [
|
||||
"styles.scss"
|
||||
],
|
||||
"scripts": [
|
||||
],
|
||||
"environmentSource": "environments/environment.ts",
|
||||
"environments": {
|
||||
"dev": "environments/environment.ts",
|
||||
"next": "environments/environment.next.ts",
|
||||
"stable": "environments/environment.stable.ts",
|
||||
"archive": "environments/environment.archive.ts"
|
||||
}
|
||||
}
|
||||
],
|
||||
"e2e": {
|
||||
"protractor": {
|
||||
"config": "./protractor.conf.js"
|
||||
}
|
||||
},
|
||||
"lint": [
|
||||
{
|
||||
"project": "src/tsconfig.app.json"
|
||||
},
|
||||
{
|
||||
"project": "src/tsconfig.spec.json"
|
||||
},
|
||||
{
|
||||
"project": "e2e/tsconfig.e2e.json"
|
||||
}
|
||||
],
|
||||
"test": {
|
||||
"karma": {
|
||||
"config": "./karma.conf.js"
|
||||
}
|
||||
},
|
||||
"defaults": {
|
||||
"styleExt": "scss",
|
||||
"component": {
|
||||
"inlineStyle": true
|
||||
},
|
||||
"build": {
|
||||
"namedChunks": true
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,12 +0,0 @@
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries

# Googlebot uses an older version of Chrome
# For additional information see: https://developers.google.com/search/docs/guides/rendering

> 0.5%
last 2 major versions
Firefox ESR
not dead
IE 11
aio/.gitignore (vendored, 6 changes)

@@ -26,13 +26,10 @@
!.vscode/extensions.json

# misc
/.firebase/
/.sass-cache
/connect.lock
/coverage
/libpeerconnection.log
debug.log
firebase-debug.log
npm-debug.log
testem.log
/typings
@@ -46,3 +43,6 @@ protractor-results*.txt
# System Files
.DS_Store
Thumbs.db

# copied dependencies
src/assets/js/lunr*
aio/README.md (172 changes)

@@ -1,131 +1,131 @@
# Angular documentation project (https://docs.angular.lat)
# Angular documentation project (https://angular.io)

Everything in this folder is part of the documentation project. This includes:
Everything in this folder is part of the documentation project. This includes

* the web site for displaying the documentation
* the dgeni configuration for converting the source files to rendered files that can be viewed in the web site.
* the tooling for setting up examples for development; and generating live-example and zip files from the examples.
* the web site for displaying the documentation
* the dgeni configuration for converting source files to rendered files that can be viewed in the web site.
* the tooling for setting up examples for development; and generating live-example and zip files from the examples.

## Developer tasks
## Developer tasks

We use [Yarn](https://yarnpkg.com) to manage the dependencies and to run build tasks.
You should run all these tasks from the `angular/aio` folder.
Here are the most important tasks you might need to use:
We use `yarn` to manage the dependencies and to run build tasks.
You should run all these tasks from the `angular/aio` folder.
Here are the most important tasks you might need to use:

* `yarn` - install all the dependencies.
* `yarn setup` - install all the dependencies, boilerplate, stackblitz, zips and run dgeni on the docs.
* `yarn setup-local` - same as `setup`, but build the Angular packages from source and use these locally built versions (instead of the ones fetched from npm) for aio and the docs examples boilerplate.
* `yarn` - install all the dependencies.
* `yarn setup` - install all the dependencies, boilerplate, stackblitz, zips and run dgeni on the docs.
* `yarn setup-local` - same as `setup`, but use the locally built Angular packages for aio and docs examples boilerplate.

* `yarn build` - create a production build of the application (after installing dependencies, boilerplate, etc).
* `yarn build-local` - same as `build`, but use `setup-local` instead of `setup`.
* `yarn build-local-with-viewengine` - same as `build-local`, but additionally turns on `ViewEngine` (pre-Ivy) mode in aio.
  (Note: to turn on `ViewEngine` mode in the docs examples, see `yarn boilerplate:add:viewengine` below.)
* `yarn build` - create a production build of the application (after installing dependencies, boilerplate, etc).
* `yarn build-local` - same as `build`, but use `setup-local` instead of `setup`.

* `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
* `yarn serve-and-sync` - run both `docs-watch` and `start` in the same console.
* `yarn lint` - check that the doc-viewer code follows our style rules.
* `yarn test` - watch all the source files, for the doc-viewer, and run all the unit tests when any change.
* `yarn test --watch=false` - run all the unit tests once.
* `yarn e2e` - run all the e2e tests for the doc-viewer.
* `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
* `yarn serve-and-sync` - run both the `docs-watch` and `start` in the same console.
* `yarn lint` - check that the doc-viewer code follows our style rules.
* `yarn test` - watch all the source files, for the doc-viewer, and run all the unit tests when any change.
* `yarn e2e` - run all the e2e tests for the doc-viewer.

* `yarn docs` - generate all the docs from the source files.
* `yarn docs-watch` - watch the Angular source and the docs files and run a short-circuited doc-gen for the docs that changed.
* `yarn docs-lint` - check that the doc gen code follows our style rules.
* `yarn docs-test` - run the unit tests for the doc generation code.
* `yarn docs` - generate all the docs from the source files.
* `yarn docs-watch` - watch the Angular source and the docs files and run a short-circuited doc-gen for the docs that changed.
* `yarn docs-lint` - check that the doc gen code follows our style rules.
* `yarn docs-test` - run the unit tests for the doc generation code.

* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally.
* `yarn boilerplate:add:viewengine` - same as `boilerplate:add` but also turns on `ViewEngine` (pre-Ivy) mode.
* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally. Add the option `--local` to use your local version of Angular contained in the "dist" folder.
* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
* `yarn generate-stackblitz` - generate the stackblitz files that are used by the `live-example` tags in the docs.
* `yarn generate-zips` - generate the zip files from the examples. Zip available via the `live-example` tags in the docs.

* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
* `yarn generate-stackblitz` - generate the stackblitz files that are used by the `live-example` tags in the docs.
* `yarn generate-zips` - generate the zip files from the examples. Zips are available via the `live-example` tags in the docs.
* `yarn example-e2e` - run all e2e tests for examples
  - `yarn example-e2e --setup` - force webdriver update & other setup, then run tests
  - `yarn example-e2e --filter=foo` - limit e2e tests to those containing the word "foo"
  - `yarn example-e2e --setup --local` - run e2e tests with the local version of Angular contained in the "dist" folder

* `yarn example-e2e` - run all e2e tests for examples. Available options:
  - `--setup`: generate boilerplate, force webdriver update and other setup, then run the tests.
  - `--local`: run the e2e tests with the local version of Angular contained in the "dist" folder.
    _Requires `--setup` in order to take effect._
  - `--viewengine`: run the e2e tests in `ViewEngine` (pre-Ivy) mode.
  - `--filter=foo`: limit the e2e tests to those containing the word "foo".
* `yarn build-ie-polyfills` - generates a js file of polyfills that can be loaded in Internet Explorer.

> **Note for Windows users**
>
> Setting up the examples involves creating some [symbolic links](https://es.wikipedia.org/wiki/Enlace_simb%C3%B3lico) (see [here](./tools/examples/README.md#symlinked-node_modules) for more details). On Windows, this requires having [Developer Mode enabled](https://blogs.windows.com/windowsdeveloper/2016/12/02/symlinks-windows-10) (supported on Windows 10 or newer) or running the setup commands as administrator.
>
> The affected commands are:
> - `yarn setup` / `yarn setup-*`
> - `yarn build` / `yarn build-*`
> - `yarn boilerplate:add`
> - `yarn example-e2e --setup`
## Using ServiceWorker locally

## Using ServiceWorker locally
Since abb36e3cb, running `yarn start --prod` will no longer set up the ServiceWorker, which
would require manually running `yarn sw-manifest` and `yarn sw-copy` (something that is not possible
with webpack serving the files from memory).

Running `yarn start` (even when explicitly targeting production mode) does not set up the
ServiceWorker. If you want to test the ServiceWorker locally, you can use `yarn build` and then
serve the files in `dist/` with `yarn http-server dist -p 4200`.
If you want to test ServiceWorker locally, you can use `yarn build` and serve the files in `dist/`
with `yarn http-server dist -p 4200`.

For more details see #16745.


## Guide to authoring
There are two types of content in the documentation:
## Guide to authoring

* **API docs**: descriptions of the modules, classes, interfaces, decorators, etc that are part of the Angular platform.
  The API docs are generated directly from the source code.
  The source code is contained in TypeScript files, stored in the `angular/packages` folder.
  Each API item may have a preceding comment, which contains JSDoc style tags and content.
  The content is written in markdown.
There are two types of content in the documentation:

* **Other content**: guides, tutorials, and other marketing material.
  All other content is written using markdown in text files, located in the `angular/aio/content` folder.
  More specifically, there are sub-folders that contain particular types of content: guides, tutorials and marketing.
* **API docs**: descriptions of the modules, classes, interfaces, decorators, etc that make up the Angular platform.
  API docs are generated directly from the source code.
  The source code is contained in TypeScript files, located in the `angular/packages` folder.
  Each API item may have a preceding comment, which contains JSDoc style tags and content.
  The content is written in markdown.

* **Code examples**: code examples need to be testable to ensure their accuracy.
  Also, our examples have a specific look and feel and allow the user to copy the source code. Larger
  examples are rendered in a tabbed interface (e.g. template, HTML, and TypeScript on separate tabs). Additionally, some are live examples, which provide links where the code can be edited, executed, and/or downloaded. For more details on working with code examples, read the [Code snippets](https://docs.angular.lat/guide/docs-style-guide#code-snippets), [Source code markup](https://docs.angular.lat/guide/docs-style-guide#source-code-markup), and [Live examples](https://docs.angular.lat/guide/docs-style-guide#live-examples) pages of the [Authors Style Guide](https://docs.angular.lat/guide/docs-style-guide).
* **Other content**: guides, tutorials, and other marketing material.
  All other content is written using markdown in text files, located in the `angular/aio/content` folder.
  More specifically, there are sub-folders that contain particular types of content: guides, tutorial and marketing.

We use the [dgeni](https://github.com/angular/dgeni) tool to convert these files into docs that can be viewed in the doc-viewer.
The [Authors Style Guide](https://docs.angular.lat/guide/docs-style-guide) prescribes guidelines for
writing guide pages, explains how to use the documentation classes and components, and how to mark up the source code to produce code snippets.
* **Code examples**: code examples need to be testable to ensure their accuracy.
  Also, our examples have a specific look and feel and allow the user to copy the source code. For larger
  examples they are rendered in a tabbed interface (e.g. template, HTML, and TypeScript on separate
  tabs). Additionally, some are live examples, which provide links where the code can be edited, executed, and/or downloaded. For details on working with code examples, please read the [Code snippets](https://angular.io/guide/docs-style-guide#code-snippets), [Source code markup](https://angular.io/guide/docs-style-guide#source-code-markup), and [Live examples](https://angular.io/guide/docs-style-guide#live-examples) pages of the [Authors Style Guide](https://angular.io/guide/docs-style-guide).

### Generating the complete docs
We use the [dgeni](https://github.com/angular/dgeni) tool to convert these files into docs that can be viewed in the doc-viewer.

The main task for generating the docs is `yarn docs`. This will process all the source files (API and other), extracting the documentation and generating JSON files that can be consumed by the doc-viewer.
The [Authors Style Guide](https://angular.io/guide/docs-style-guide) prescribes guidelines for
writing guide pages, explains how to use the documentation classes and components, and how to markup sample source code to produce code snippets.

### Partial doc generation for editors
### Generating the complete docs

Full doc generation can take up to a minute. That is too slow for efficient document creation and editing.
The main task for generating the docs is `yarn docs`. This will process all the source files (API and other),
extracting the documentation and generating JSON files that can be consumed by the doc-viewer.

You can make small changes in a smart editor that displays formatted markdown:
>In VS Code, _Cmd-K, V_ opens the markdown preview in a side pane; _Cmd-B_ toggles the left sidebar
### Partial doc generation for editors

You also want to see those changes displayed properly in the doc-viewer
with a quick edit/view cycle time.
Full doc generation can take up to one minute. That's too slow for efficient document creation and editing.

For this purpose, use the `yarn docs-watch` task, which watches for changes to source files and only re-processes the files necessary to generate the docs that are related to the file that has changed.
Since this task takes shortcuts, it is much faster (often less than 1 second) but it will not produce full fidelity content. For example, links to other docs and code examples may not render correctly. This is most noticeable in links to other docs and in the embedded examples, which may not always render correctly.
You can make small changes in a smart editor that displays formatted markdown:
>In VS Code, _Cmd-K, V_ opens markdown preview in side pane; _Cmd-B_ toggles left sidebar

The general setup is as follows:
You also want to see those changes displayed properly in the doc viewer
with a quick, edit/view cycle time.

* Open a terminal, make sure the dependencies are installed; run an initial doc generation; then start the doc-viewer:
For this purpose, use the `yarn docs-watch` task, which watches for changes to source files and only
re-processes the files necessary to generate the docs that are related to the file that has changed.
Since this task takes shortcuts, it is much faster (often less than 1 second) but it won't produce full
fidelity content. For example, links to other docs and code examples may not render correctly. This is
most particularly noticed in links to other docs and in the embedded examples, which may not always render
correctly.

The general setup is as follows:

* Open a terminal, ensure the dependencies are installed; run an initial doc generation; then start the doc-viewer:

  ```bash
  yarn setup
  yarn
  yarn docs
  yarn start
  ```

* Open a second terminal and start watching the docs.
* Open a second terminal and start watching the docs

  ```bash
  yarn docs-watch
  ```

>Alternatively, try the consolidated `serve-and-sync` command that builds, watches and serves in the same terminal window
>Alternatively, try the consolidated `serve-and-sync` command that builds, watches and serves in the same terminal window
  ```bash
  yarn serve-and-sync
  ```

* Open a browser at https://localhost:4200/ and navigate to the document on which you want to work.
  You can automatically open the browser by using `yarn start -o` in the first terminal.
* Open a browser at https://localhost:4200/ and navigate to the document on which you want to work.
  You can automatically open the browser by using `yarn start -o` in the first terminal.

* Make changes to the page's associated doc or example files. Every time a file is saved, the doc will be regenerated, the app will rebuild and the page will reload.
* Make changes to the page's associated doc or example files. Every time a file is saved, the doc will
  be regenerated, the app will rebuild and the page will reload.

* If you get a build error about the examples or any other odd behavior, be sure to consult the
  [Authors Style Guide](https://angular.io/guide/docs-style-guide) for more information.
* If you get a build error complaining about examples or any other odd behavior, be sure to consult
  the [Authors Style Guide](https://angular.io/guide/docs-style-guide).
@@ -1,5 +1,5 @@
# Image metadata and config
FROM debian:buster
FROM debian:jessie

LABEL name="angular.io PR preview" \
      description="This image implements the PR preview functionality for angular.io." \
@@ -8,62 +8,53 @@ LABEL name="angular.io PR preview" \

VOLUME /aio-secrets
VOLUME /var/www/aio-builds
VOLUME /dockerbuild

EXPOSE 80 443


# Build-time args and env vars
# The AIO_ARTIFACT_PATH path needs to be kept in sync with the value of
# `aio_preview->steps->store_artifacts->destination` property in `.circleci/config.yml`
ARG AIO_ARTIFACT_PATH=aio/dist/aio-snapshot.tgz
ARG TEST_AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH
ARG AIO_BUILDS_DIR=/var/www/aio-builds
ARG TEST_AIO_BUILDS_DIR=/tmp/aio-builds
ARG AIO_DOMAIN_NAME=ngbuilds.io
ARG TEST_AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME.localhost
ARG AIO_GITHUB_ORGANIZATION=angular
ARG TEST_AIO_GITHUB_ORGANIZATION=test-org
ARG AIO_GITHUB_REPO=angular
ARG TEST_AIO_GITHUB_REPO=test-repo
ARG AIO_GITHUB_TEAM_SLUGS=aio-auto-previews,aio-contributors
ARG TEST_AIO_GITHUB_TEAM_SLUGS=test-team-1,test-team-2
ARG TEST_AIO_GITHUB_ORGANIZATION=angular
ARG AIO_GITHUB_TEAM_SLUGS=angular-core,aio-contributors
ARG TEST_AIO_GITHUB_TEAM_SLUGS=angular-core,aio-contributors
ARG AIO_NGINX_HOSTNAME=$AIO_DOMAIN_NAME
ARG TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_DOMAIN_NAME
ARG AIO_NGINX_PORT_HTTP=80
ARG TEST_AIO_NGINX_PORT_HTTP=8080
ARG AIO_NGINX_PORT_HTTPS=443
ARG TEST_AIO_NGINX_PORT_HTTPS=4433
ARG AIO_SIGNIFICANT_FILES_PATTERN='^(?:aio|packages)/(?!.*[._]spec\\.[jt]s$)'
ARG TEST_AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN
ARG AIO_REPO_SLUG=angular/angular
ARG TEST_AIO_REPO_SLUG=test-repo/test-slug
ARG AIO_TRUSTED_PR_LABEL="aio: preview"
ARG TEST_AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL
ARG AIO_PREVIEW_SERVER_HOSTNAME=preview.localhost
ARG TEST_AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME
ARG AIO_ARTIFACT_MAX_SIZE=26214400
ARG TEST_AIO_ARTIFACT_MAX_SIZE=200
ARG AIO_PREVIEW_SERVER_PORT=3000
ARG TEST_AIO_PREVIEW_SERVER_PORT=3001
ARG TEST_AIO_TRUSTED_PR_LABEL="aio: preview"
ARG AIO_UPLOAD_HOSTNAME=upload.localhost
ARG TEST_AIO_UPLOAD_HOSTNAME=upload.localhost
ARG AIO_UPLOAD_MAX_SIZE=20971520
ARG TEST_AIO_UPLOAD_MAX_SIZE=20971520
ARG AIO_UPLOAD_PORT=3000
ARG TEST_AIO_UPLOAD_PORT=3001

ENV AIO_ARTIFACT_PATH=$AIO_ARTIFACT_PATH TEST_AIO_ARTIFACT_PATH=$TEST_AIO_ARTIFACT_PATH \
    AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
    AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
    AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
    AIO_GITHUB_REPO=$AIO_GITHUB_REPO TEST_AIO_GITHUB_REPO=$TEST_AIO_GITHUB_REPO \
    AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
    AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
    AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
    AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
    AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
    AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
    AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
    AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
    AIO_SIGNIFICANT_FILES_PATTERN=$AIO_SIGNIFICANT_FILES_PATTERN TEST_AIO_SIGNIFICANT_FILES_PATTERN=$TEST_AIO_SIGNIFICANT_FILES_PATTERN \
    AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
    AIO_PREVIEW_SERVER_HOSTNAME=$AIO_PREVIEW_SERVER_HOSTNAME TEST_AIO_PREVIEW_SERVER_HOSTNAME=$TEST_AIO_PREVIEW_SERVER_HOSTNAME \
    AIO_ARTIFACT_MAX_SIZE=$AIO_ARTIFACT_MAX_SIZE TEST_AIO_ARTIFACT_MAX_SIZE=$TEST_AIO_ARTIFACT_MAX_SIZE \
    AIO_PREVIEW_SERVER_PORT=$AIO_PREVIEW_SERVER_PORT TEST_AIO_PREVIEW_SERVER_PORT=$TEST_AIO_PREVIEW_SERVER_PORT \
    AIO_WWW_USER=www-data \
ENV AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR \
    AIO_DOMAIN_NAME=$AIO_DOMAIN_NAME TEST_AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME \
    AIO_GITHUB_ORGANIZATION=$AIO_GITHUB_ORGANIZATION TEST_AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION \
    AIO_GITHUB_TEAM_SLUGS=$AIO_GITHUB_TEAM_SLUGS TEST_AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS \
    AIO_LOCALCERTS_DIR=/etc/ssl/localcerts TEST_AIO_LOCALCERTS_DIR=/etc/ssl/localcerts-test \
    AIO_NGINX_HOSTNAME=$AIO_NGINX_HOSTNAME TEST_AIO_NGINX_HOSTNAME=$TEST_AIO_NGINX_HOSTNAME \
    AIO_NGINX_LOGS_DIR=/var/log/aio/nginx TEST_AIO_NGINX_LOGS_DIR=/var/log/aio/nginx-test \
    AIO_NGINX_PORT_HTTP=$AIO_NGINX_PORT_HTTP TEST_AIO_NGINX_PORT_HTTP=$TEST_AIO_NGINX_PORT_HTTP \
    AIO_NGINX_PORT_HTTPS=$AIO_NGINX_PORT_HTTPS TEST_AIO_NGINX_PORT_HTTPS=$TEST_AIO_NGINX_PORT_HTTPS \
    AIO_REPO_SLUG=$AIO_REPO_SLUG TEST_AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG \
    AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
    AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
    AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
    AIO_UPLOAD_HOSTNAME=$AIO_UPLOAD_HOSTNAME TEST_AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME \
    AIO_UPLOAD_MAX_SIZE=$AIO_UPLOAD_MAX_SIZE TEST_AIO_UPLOAD_MAX_SIZE=$TEST_AIO_UPLOAD_MAX_SIZE \
    AIO_UPLOAD_PORT=$AIO_UPLOAD_PORT TEST_AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT \
    AIO_WWW_USER=www-data \
    NODE_ENV=production


@@ -72,29 +63,25 @@ RUN mkdir /var/log/aio


# Add extra package sources
RUN apt-get update -y && apt-get install -y curl=7.64.0-4+deb10u1
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_12.x | bash -
RUN apt-get update -y && apt-get install -y curl
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_6.x | bash -
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN echo "deb http://ftp.debian.org/debian jessie-backports main" | tee /etc/apt/sources.list.d/backports.list


# Install packages
# NOTE: Some packages (such as `nginx`, `nodejs`, `openssl`) make older versions unavailable on the
#       repositories, so we cannot pin to specific versions for these packages :(
#       See for example:
#       - https://github.com/nodesource/distributions/issues/33
#       - https://askubuntu.com/questions/715104/how-can-i-downgrade-openssl-via-apt-get
RUN apt-get update -y && apt-get install -y \
    cron=3.0pl1-134+deb10u1 \
    dnsmasq=2.80-1 \
    nano=3.2-3 \
    nginx \
    chkconfig \
    cron \
    dnsmasq \
    nano \
    nodejs \
    openssl \
    rsyslog=8.1901.0-1 \
    vim=2:8.1.0875-5 \
    yarn=1.22.4-1
RUN yarn global add pm2@4.4.0
    rsyslog \
    yarn
RUN apt-get install -t jessie-backports -y nginx
RUN yarn global add pm2@2


# Set up log rotation
@@ -112,9 +99,9 @@ RUN printenv | grep AIO_ >> /etc/environment
# Set up dnsmasq
COPY dnsmasq/dnsmasq.conf /etc/
RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf
RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|g" /etc/dnsmasq.conf
RUN sed -i "s|{{\$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/dnsmasq.conf
RUN sed -i "s|{{\$TEST_AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/dnsmasq.conf


# Set up SSL/TLS certificates
@@ -138,9 +125,9 @@ RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf

COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
@@ -149,13 +136,14 @@ RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/co
RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_HOSTNAME}}|$TEST_AIO_PREVIEW_SERVER_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_ARTIFACT_MAX_SIZE}}|$TEST_AIO_ARTIFACT_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_PREVIEW_SERVER_PORT}}|$TEST_AIO_PREVIEW_SERVER_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf


# Set up pm2
RUN pm2 startup --user root > /dev/null
RUN pm2 startup systemv -u root > /dev/null
RUN chkconfig pm2-root on


# Set up the shell scripts
@@ -167,7 +155,8 @@ RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \

# Set up the Node.js scripts
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
RUN yarn --cwd="$AIO_SCRIPTS_JS_DIR/" install --production --frozen-lockfile
WORKDIR $AIO_SCRIPTS_JS_DIR/
RUN yarn install --production --frozen-lockfile


# Set up health check
@@ -1,2 +1,2 @@
# Periodically clean up builds that do not correspond to currently open PRs
0 12 * * * /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1
0 12 * * * root /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1
@@ -8,9 +8,9 @@ listen-address=127.0.0.1

# Force an IP address for these domains.
address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
address=/{{$AIO_UPLOAD_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_PREVIEW_SERVER_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_UPLOAD_HOSTNAME}}/127.0.0.1

# Run as root (required from inside docker container).
user=root
@@ -1,9 +0,0 @@
/var/log/aio/preview-server-*.log {
  compress
  copytruncate
  delaycompress
  missingok
  monthly
  notifempty
  rotate 6
}
@@ -0,0 +1,9 @@
/var/log/aio/upload-server-*.log {
  compress
  copytruncate
  delaycompress
  missingok
  monthly
  notifempty
  rotate 6
}
@@ -36,11 +36,6 @@ server {
  access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
  error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;

  error_page 404 /404.html;
  location "=/404.html" {
    internal;
  }

  location "~/[^/]+\.[^/]+$" {
    try_files $uri $uri/ =404;
  }
@@ -71,32 +66,24 @@ server {
    return 200 '';
  }

  # Check PRs previewability
  location "~^/can-have-public-preview/\d+/?$" {
    if ($request_method != "GET") {
      add_header Allow "GET";
      return 405;
    }

    proxy_pass_request_headers on;
    proxy_redirect off;
    proxy_method GET;
    proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;

    resolver 127.0.0.1;
  }

  # Notify about CircleCI builds
  location "~^/circle-build/?$" {
  # Upload builds
  location "~^/create-build/(?<pr>[1-9][0-9]*)/(?<sha>[0-9a-f]{40})/?$" {
    if ($request_method != "POST") {
      add_header Allow "POST";
      return 405;
    }

    client_body_temp_path /tmp/aio-create-builds;
    client_body_buffer_size 128K;
    client_max_body_size {{$AIO_UPLOAD_MAX_SIZE}};
    client_body_in_file_only on;

    proxy_pass_request_headers on;
    proxy_set_header X-FILE $request_body_file;
    proxy_set_body off;
    proxy_redirect off;
    proxy_method POST;
    proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
    proxy_method GET;
    proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;

    resolver 127.0.0.1;
  }
@@ -111,7 +98,7 @@ server {
    proxy_pass_request_headers on;
    proxy_redirect off;
    proxy_method POST;
    proxy_pass http://{{$AIO_PREVIEW_SERVER_HOSTNAME}}:{{$AIO_PREVIEW_SERVER_PORT}}$request_uri;
    proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;

    resolver 127.0.0.1;
  }
@@ -3,54 +3,29 @@ import * as fs from 'fs';
import * as path from 'path';
import * as shell from 'shelljs';
import {HIDDEN_DIR_PREFIX} from '../common/constants';
import {GithubApi} from '../common/github-api';
import {GithubPullRequests} from '../common/github-pull-requests';
import {assertNotMissingOrEmpty, getPrInfoFromDownloadPath, Logger} from '../common/utils';
import {assertNotMissingOrEmpty} from '../common/utils';

// Classes
export class BuildCleaner {

  private logger = new Logger('BuildCleaner');

  // Constructor
  constructor(protected buildsDir: string, protected githubOrg: string, protected githubRepo: string,
              protected githubToken: string, protected downloadsDir: string, protected artifactPath: string) {
  constructor(protected buildsDir: string, protected repoSlug: string, protected githubToken: string) {
    assertNotMissingOrEmpty('buildsDir', buildsDir);
    assertNotMissingOrEmpty('githubOrg', githubOrg);
    assertNotMissingOrEmpty('githubRepo', githubRepo);
    assertNotMissingOrEmpty('repoSlug', repoSlug);
    assertNotMissingOrEmpty('githubToken', githubToken);
    assertNotMissingOrEmpty('downloadsDir', downloadsDir);
    assertNotMissingOrEmpty('artifactPath', artifactPath);
  }

  // Methods - Public
  public async cleanUp(): Promise<void> {
    try {
      this.logger.log('Cleaning up builds and downloads');
      const openPrs = await this.getOpenPrNumbers();
      this.logger.log(`Open pull requests: ${openPrs.length}`);
      await Promise.all([
        this.cleanBuilds(openPrs),
        this.cleanDownloads(openPrs),
      ]);
    } catch (error) {
      this.logger.error('ERROR:', error);
      throw error;
    }
  public cleanUp(): Promise<void> {
    return Promise.all([
      this.getExistingBuildNumbers(),
      this.getOpenPrNumbers(),
    ]).then(([existingBuilds, openPrs]) => this.removeUnnecessaryBuilds(existingBuilds, openPrs));
  }

  public async cleanBuilds(openPrs: number[]): Promise<void> {
    const existingBuilds = await this.getExistingBuildNumbers();
    await this.removeUnnecessaryBuilds(existingBuilds, openPrs);
  }

  public async cleanDownloads(openPrs: number[]): Promise<void> {
    const existingDownloads = await this.getExistingDownloads();
    await this.removeUnnecessaryDownloads(existingDownloads, openPrs);
  }

  public getExistingBuildNumbers(): Promise<number[]> {
    return new Promise<number[]>((resolve, reject) => {
  // Methods - Protected
  protected getExistingBuildNumbers(): Promise<number[]> {
    return new Promise((resolve, reject) => {
      fs.readdir(this.buildsDir, (err, files) => {
        if (err) {
          return reject(err);
@@ -66,29 +41,32 @@ export class BuildCleaner {
    });
  }

  public async getOpenPrNumbers(): Promise<number[]> {
    const api = new GithubApi(this.githubToken);
    const githubPullRequests = new GithubPullRequests(api, this.githubOrg, this.githubRepo);
    const prs = await githubPullRequests.fetchAll('open');
    return prs.map(pr => pr.number);
  protected getOpenPrNumbers(): Promise<number[]> {
    const githubPullRequests = new GithubPullRequests(this.githubToken, this.repoSlug);

    return githubPullRequests.
      fetchAll('open').
      then(prs => prs.map(pr => pr.number));
  }

  public removeDir(dir: string): void {
  protected removeDir(dir: string) {
    try {
      if (shell.test('-d', dir)) {
        shell.chmod('-R', 'a+w', dir);
        // Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
        (shell as any).chmod('-R', 'a+w', dir);
        shell.rm('-rf', dir);
      }
    } catch (err) {
      this.logger.error(`ERROR: Unable to remove '${dir}' due to:`, err);
      console.error(`ERROR: Unable to remove '${dir}' due to:`, err);
    }
  }

  public removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]): void {
  protected removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]) {
    const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));

    this.logger.log(`Existing builds: ${existingBuildNumbers.length}`);
    this.logger.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
    console.log(`Existing builds: ${existingBuildNumbers.length}`);
    console.log(`Open pull requests: ${openPrNumbers.length}`);
    console.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);

    // Try removing public dirs.
    toRemove.
@@ -100,29 +78,4 @@ export class BuildCleaner {
      map(num => path.join(this.buildsDir, HIDDEN_DIR_PREFIX + String(num))).
      forEach(dir => this.removeDir(dir));
  }

  public getExistingDownloads(): Promise<string[]> {
    const artifactFile = path.basename(this.artifactPath);
    return new Promise<string[]>((resolve, reject) => {
      fs.readdir(this.downloadsDir, (err, files) => {
        if (err) {
          return reject(err);
        }
        files = files.filter(file => file.endsWith(artifactFile));
        resolve(files);
      });
    });
  }

  public removeUnnecessaryDownloads(existingDownloads: string[], openPrNumbers: number[]): void {
    const toRemove = existingDownloads.filter(filePath => {
      const {pr} = getPrInfoFromDownloadPath(filePath);
      return !openPrNumbers.includes(pr);
    });

    this.logger.log(`Existing downloads: ${existingDownloads.length}`);
    this.logger.log(`Removing ${toRemove.length} download(s): ${toRemove.join(', ')}`);

    toRemove.forEach(filePath => shell.rm(path.join(this.downloadsDir, filePath)));
  }
}
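The two `BuildCleaner` variants above differ mainly in how the repository is identified (org/repo vs. a single slug) and in whether downloaded artifacts are cleaned as well as build directories. As a rough usage sketch (not the project's actual entry point), the six-argument variant could be wired up as follows; the directory paths, org/repo names and artifact path are illustrative placeholders, and the token is read from the environment:

```ts
import {BuildCleaner} from './build-cleaner';

// Placeholder values; in the real setup these come from env vars and constants.
const cleaner = new BuildCleaner(
  '/var/www/aio-builds',           // buildsDir: where preview builds live
  'angular',                       // githubOrg
  'angular',                       // githubRepo
  process.env.AIO_GITHUB_TOKEN!,   // githubToken (secret)
  '/tmp/aio-downloads',            // downloadsDir: downloaded artifacts
  'aio/dist/aio-snapshot.tgz');    // artifactPath

// cleanUp() removes builds and downloads whose PRs are no longer open.
cleaner.cleanUp().catch(() => process.exit(1));
```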
@@ -1,26 +1,23 @@
// Imports
import {AIO_DOWNLOADS_DIR} from '../common/constants';
import {
  AIO_ARTIFACT_PATH,
  AIO_BUILDS_DIR,
  AIO_GITHUB_ORGANIZATION,
  AIO_GITHUB_REPO,
  AIO_GITHUB_TOKEN,
} from '../common/env-variables';
import {getEnvVar} from '../common/utils';
import {BuildCleaner} from './build-cleaner';

// Constants
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN', true);
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');

// Run
_main();

// Functions
function _main(): void {
  const buildCleaner = new BuildCleaner(
    AIO_BUILDS_DIR,
    AIO_GITHUB_ORGANIZATION,
    AIO_GITHUB_REPO,
    AIO_GITHUB_TOKEN,
    AIO_DOWNLOADS_DIR,
    AIO_ARTIFACT_PATH);
function _main() {
  console.log(`[${new Date()}] - Cleaning up builds...`);

  buildCleaner.cleanUp().catch(() => process.exit(1));
  const buildCleaner = new BuildCleaner(AIO_BUILDS_DIR, AIO_REPO_SLUG, AIO_GITHUB_TOKEN);

  buildCleaner.cleanUp().catch(err => {
    console.error('ERROR:', err);
    process.exit(1);
  });
}
@@ -1,90 +0,0 @@
// Imports
import fetch from 'node-fetch';
import {assertNotMissingOrEmpty} from './utils';

// Constants
const CIRCLE_CI_API_URL = 'https://circleci.com/api/v1.1/project/github';

// Interfaces - Types
export interface ArtifactInfo {
  path: string;
  pretty_path: string;
  node_index: number;
  url: string;
}

export type ArtifactResponse = ArtifactInfo[];

export interface BuildInfo {
  reponame: string;
  failed: boolean;
  branch: string;
  username: string;
  build_num: number;
  has_artifacts: boolean;
  outcome: string;  // e.g. 'success'
  vcs_revision: string;  // HEAD SHA
  // there are other fields but they are not used in this code
}

/**
 * A Helper that can interact with the CircleCI API.
 */
export class CircleCiApi {

  private tokenParam = `circle-token=${this.circleCiToken}`;

  /**
   * Construct a helper that can interact with the CircleCI REST API.
   * @param githubOrg The Github organisation whose repos we want to access in CircleCI (e.g. angular).
   * @param githubRepo The Github repo whose builds we want to access in CircleCI (e.g. angular).
   * @param circleCiToken The CircleCI API access token (secret).
   */
  constructor(
    private githubOrg: string,
    private githubRepo: string,
    private circleCiToken: string,
  ) {
    assertNotMissingOrEmpty('githubOrg', githubOrg);
    assertNotMissingOrEmpty('githubRepo', githubRepo);
    assertNotMissingOrEmpty('circleCiToken', circleCiToken);
  }

  /**
   * Get the info for a build from the CircleCI API
   * @param buildNumber The CircleCI build number that generated the artifact.
   * @returns A promise to the info about the build
   */
  public async getBuildInfo(buildNumber: number): Promise<BuildInfo> {
    try {
      const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
      const response = await fetch(`${baseUrl}?${this.tokenParam}`);
      if (response.status !== 200) {
        throw new Error(`${baseUrl}: ${response.status} - ${response.statusText}`);
      }
      return response.json();
    } catch (error) {
      throw new Error(`CircleCI build info request failed (${error.message})`);
    }
  }

  /**
   * Query the CircleCI API to get a URL for a specified artifact from a specified build.
   * @param artifactPath The path, within the build to the artifact.
   * @returns A promise to the URL that can be requested to download the actual build artifact file.
   */
  public async getBuildArtifactUrl(buildNumber: number, artifactPath: string): Promise<string> {
    const baseUrl = `${CIRCLE_CI_API_URL}/${this.githubOrg}/${this.githubRepo}/${buildNumber}`;
    try {
      const response = await fetch(`${baseUrl}/artifacts?${this.tokenParam}`);
      const artifacts = await response.json() as ArtifactResponse;
      const artifact = artifacts.find(item => item.path === artifactPath);
      if (!artifact) {
        throw new Error(`Missing artifact (${artifactPath}) for CircleCI build: ${buildNumber}`);
      }
      return artifact.url;
    } catch (error) {
      throw new Error(`CircleCI artifact URL request failed (${error.message})`);
    }
  }
}
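The removed `CircleCiApi` helper above exposes two calls: one for build metadata and one for resolving an artifact download URL. A minimal hedged sketch of how they might be combined; the build number, artifact path and token below are placeholders, not values from the diff:

```ts
import {CircleCiApi} from './circle-ci-api';

// Placeholder org/repo and token; the real token is a secret read from the environment.
const circleCi = new CircleCiApi('angular', 'angular', process.env.AIO_CIRCLE_CI_TOKEN!);

async function downloadUrlFor(buildNumber: number): Promise<string> {
  // Fetch build metadata first, e.g. to verify it produced artifacts.
  const info = await circleCi.getBuildInfo(buildNumber);
  if (!info.has_artifacts) {
    throw new Error(`Build ${buildNumber} has no artifacts.`);
  }
  // Resolve the URL of one specific artifact within that build.
  return circleCi.getBuildArtifactUrl(buildNumber, 'aio/dist/aio-snapshot.tgz');
}

// Illustrative build number.
downloadUrlFor(12345).then(url => console.log(url), err => console.error(err));
```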
@@ -1,4 +1,3 @@
// Constants
export const AIO_DOWNLOADS_DIR = '/tmp/aio-downloads';
export const HIDDEN_DIR_PREFIX = 'hidden--';
export const SHORT_SHA_LEN = 7;
@@ -1,19 +0,0 @@
import {getEnvVar} from './utils';

export const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
export const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
export const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
export const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
export const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
export const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
export const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
export const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
export const AIO_NGINX_HOSTNAME = getEnvVar('AIO_NGINX_HOSTNAME');
export const AIO_NGINX_PORT_HTTP = +getEnvVar('AIO_NGINX_PORT_HTTP');
export const AIO_NGINX_PORT_HTTPS = +getEnvVar('AIO_NGINX_PORT_HTTPS');
export const AIO_SIGNIFICANT_FILES_PATTERN = getEnvVar('AIO_SIGNIFICANT_FILES_PATTERN');
export const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
export const AIO_PREVIEW_SERVER_HOSTNAME = getEnvVar('AIO_PREVIEW_SERVER_HOSTNAME');
export const AIO_PREVIEW_SERVER_PORT = +getEnvVar('AIO_PREVIEW_SERVER_PORT');
export const AIO_ARTIFACT_MAX_SIZE = +getEnvVar('AIO_ARTIFACT_MAX_SIZE');
export const AIO_WWW_USER = getEnvVar('AIO_WWW_USER');
@@ -28,18 +28,29 @@ export class GithubApi {
  }

  // Methods - Public
  public get<T = any>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
  public get<T>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
    const path = this.buildPath(pathname, params);
    return this.request<T>('get', path);
  }

  public post<T = any>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
  public post<T>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
    const path = this.buildPath(pathname, params);
    return this.request<T>('post', path, data);
  }

  // In GitHub API paginated requests, page numbering is 1-based. (https://developer.github.com/v3/#pagination)
  public getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 1): Promise<T[]> {
  // Methods - Protected
  protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
    if (params == null) {
      return pathname;
    }

    const search = (params === null) ? '' : this.serializeSearchParams(params);
    const joiner = search && '?';

    return `${pathname}${joiner}${search}`;
  }

  protected getPaginated<T>(pathname: string, baseParams: RequestParams = {}, currentPage: number = 0): Promise<T[]> {
    const perPage = 100;
    const params = {
      ...baseParams,
@@ -56,18 +67,6 @@ export class GithubApi {
    });
  }

  // Methods - Protected
  protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
    if (params == null) {
      return pathname;
    }

    const search = (params === null) ? '' : this.serializeSearchParams(params);
    const joiner = search && '?';

    return `${pathname}${joiner}${search}`;
  }

  protected request<T>(method: string, path: string, data: any = null): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      const options = {
@@ -82,7 +81,7 @@ export class GithubApi {
        reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
      };
      const onSuccess = (responseText: string) => {
        try { resolve(responseText && JSON.parse(responseText)); } catch (err) { reject(err); }
        try { resolve(JSON.parse(responseText)); } catch (err) { reject(err); }
      };
      const onResponse = (res: IncomingMessage) => {
        const statusCode = res.statusCode || -1;
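For reference, here is a small usage sketch of the `GithubApi` helper, assuming the variant above in which `get` and `getPaginated` are public and the constructor takes only a token (as seen in `BuildCleaner.getOpenPrNumbers` earlier in this diff). The request paths and parameters are illustrative:

```ts
import {GithubApi} from './github-api';

// Token is a placeholder read from the environment.
const api = new GithubApi(process.env.AIO_GITHUB_TOKEN!);

// Single request: fetch one issue and read a field of the parsed JSON response.
api.get<{title: string}>('/repos/angular/angular/issues/16745').
  then(issue => console.log(issue.title));

// Paginated request: getPaginated presumably keeps requesting successive pages
// (100 items per page, 1-based page numbers in the newer variant) and concatenates
// the results; the paging logic itself is partly elided in the hunk above.
api.getPaginated<{number: number}>('/repos/angular/angular/pulls', {state: 'open'}).
  then(prs => console.log(`Open PRs: ${prs.length}`));
```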
@@ -1,79 +1,46 @@
// Imports
import {assertNotMissingOrEmpty} from '../common/utils';
import {GithubApi} from './github-api';
import {assert, assertNotMissingOrEmpty} from './utils';

export interface PullRequest {
// Interfaces - Types
export interface PullRequest {
  number: number;
  user: {login: string};
  labels: {name: string}[];
}

export interface FileInfo {
  sha: string;
  filename: string;
}

export type PullRequestState = 'all' | 'closed' | 'open';

/**
 * Access pull requests on GitHub.
 */
export class GithubPullRequests {
  public repoSlug: string;

  /**
   * Create an instance of this helper
   * @param api An instance of the Github API helper.
   * @param githubOrg The organisation on GitHub whose repo we will interrogate.
   * @param githubRepo The repository on Github with whose PRs we will interact.
   */
  constructor(private api: GithubApi, githubOrg: string, githubRepo: string) {
    assertNotMissingOrEmpty('githubOrg', githubOrg);
    assertNotMissingOrEmpty('githubRepo', githubRepo);
    this.repoSlug = `${githubOrg}/${githubRepo}`;
// Classes
export class GithubPullRequests extends GithubApi {
  // Constructor
  constructor(githubToken: string, protected repoSlug: string) {
    super(githubToken);
    assertNotMissingOrEmpty('repoSlug', repoSlug);
  }

  /**
   * Post a comment on a PR.
   * @param pr The number of the PR on which to comment.
   * @param body The body of the comment to post.
   * @returns A promise that resolves when the comment has been posted.
   */
  public addComment(pr: number, body: string): Promise<any> {
    assert(pr > 0, `Invalid PR number: ${pr}`);
    assert(!!body, `Invalid or empty comment body: ${body}`);
    return this.api.post<any>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
  // Methods - Public
  public addComment(pr: number, body: string): Promise<void> {
    if (!(pr > 0)) {
      throw new Error(`Invalid PR number: ${pr}`);
    } else if (!body) {
      throw new Error(`Invalid or empty comment body: ${body}`);
    }

    return this.post<void>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
  }

  /**
   * Request information about a PR.
   * @param pr The number of the PR for which to request info.
   * @returns A promise that resolves with information about the specified PR.
   */
  public fetch(pr: number): Promise<PullRequest> {
    assert(pr > 0, `Invalid PR number: ${pr}`);
    // Using the `/issues/` URL, because the `/pulls/` one does not provide labels.
    return this.api.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
    return this.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
  }

  /**
   * Request information about all PRs that match the given state.
   * @param state Only retrieve PRs that have this state.
   * @returns A promise that is resolved with information about the requested PRs.
   */
  public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
    console.log(`Fetching ${state} pull requests...`);

    const pathname = `/repos/${this.repoSlug}/pulls`;
    const params = {state};

    return this.api.getPaginated<PullRequest>(pathname, params);
  }

  /**
   * Request a list of files for the given PR.
   * @param pr The number of the PR for which to request files.
   * @returns A promise that resolves to an array of file information
   */
  public fetchFiles(pr: number): Promise<FileInfo[]> {
    assert(pr > 0, `Invalid PR number: ${pr}`);
    return this.api.getPaginated<FileInfo>(`/repos/${this.repoSlug}/pulls/${pr}/files`);
    return this.getPaginated<PullRequest>(pathname, params);
  }
}
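A hedged usage sketch of `GithubPullRequests`, assuming the composition-based variant above (a `GithubApi` instance plus org and repo, matching how `BuildCleaner` constructs it earlier in the diff). The PR number and comment text are illustrative:

```ts
import {GithubApi} from './github-api';
import {GithubPullRequests} from './github-pull-requests';

const api = new GithubApi(process.env.AIO_GITHUB_TOKEN!);
const prs = new GithubPullRequests(api, 'angular', 'angular');

async function previewComment(pr: number): Promise<void> {
  const info = await prs.fetch(pr);        // labels come from the `/issues/` endpoint
  const files = await prs.fetchFiles(pr);  // paginated list of changed files
  console.log(`PR #${info.number} by ${info.user.login} touches ${files.length} file(s).`);
  await prs.addComment(pr, 'A preview of this PR will be available shortly.');
}

// Illustrative PR number.
previewComment(24242).catch(err => console.error(err));
```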
@ -1,72 +1,45 @@
|
||||
// Imports
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {GithubApi} from './github-api';
|
||||
import {assertNotMissingOrEmpty} from './utils';
|
||||
|
||||
export interface Team {
|
||||
// Interfaces - Types
|
||||
interface Team {
|
||||
id: number;
|
||||
slug: string;
|
||||
}
|
||||
|
||||
export interface TeamMembership {
|
||||
interface TeamMembership {
|
||||
state: string;
|
||||
}
|
||||
|
||||
export class GithubTeams {
|
||||
/**
|
||||
* Create an instance of this helper
|
||||
* @param api An instance of the Github API helper.
|
||||
* @param githubOrg The organisation on GitHub whose repo we will interrogate.
|
||||
*/
|
||||
constructor(private api: GithubApi, protected githubOrg: string) {
|
||||
assertNotMissingOrEmpty('githubOrg', githubOrg);
|
||||
// Classes
|
||||
export class GithubTeams extends GithubApi {
|
||||
// Constructor
|
||||
constructor(githubToken: string, protected organization: string) {
|
||||
super(githubToken);
|
||||
assertNotMissingOrEmpty('organization', organization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request information about all the organisation's teams in GitHub.
|
||||
* @returns A promise that is resolved with information about the teams.
|
||||
*/
|
||||
// Methods - Public
|
||||
public fetchAll(): Promise<Team[]> {
|
||||
return this.api.getPaginated<Team>(`/orgs/${this.githubOrg}/teams`);
|
||||
return this.getPaginated<Team>(`/orgs/${this.organization}/teams`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the specified username is a member of the specified team.
|
||||
* @param username The usernane to check for in the team.
|
||||
* @param teamIds The team to check for the username.
|
||||
* @returns a Promise that resolves to `true` if the username is a member of the team.
|
||||
*/
|
||||
public async isMemberById(username: string, teamIds: number[]): Promise<boolean> {
|
||||
public isMemberById(username: string, teamIds: number[]): Promise<boolean> {
|
||||
const getMembership = (teamId: number) =>
|
||||
this.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`).
|
||||
then(membership => membership.state === 'active').
|
||||
catch(() => false);
|
||||
const reduceFn = (promise: Promise<boolean>, teamId: number) =>
|
||||
promise.then(isMember => isMember || getMembership(teamId));
|
||||
|
||||
const getMembership = async (teamId: number) => {
|
||||
try {
|
||||
const {state} = await this.api.get<TeamMembership>(`/teams/${teamId}/memberships/${username}`);
|
||||
return state === 'active';
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
for (const teamId of teamIds) {
|
||||
if (await getMembership(teamId)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
return teamIds.reduce(reduceFn, Promise.resolve(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the given username is a member of the teams specified by the team slugs.
|
||||
* @param username The username to check for in the teams.
|
||||
* @param teamSlugs A collection of slugs that represent the teams to check for the the username.
|
||||
* @returns a Promise that resolves to `true` if the usernane is a member of at least one of the specified teams.
|
||||
*/
|
||||
public async isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
|
||||
try {
|
||||
const teams = await this.fetchAll();
|
||||
const teamIds = teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id);
|
||||
return await this.isMemberById(username, teamIds);
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
public isMemberBySlug(username: string, teamSlugs: string[]): Promise<boolean> {
|
||||
return this.fetchAll().
|
||||
then(teams => teams.filter(team => teamSlugs.includes(team.slug)).map(team => team.id)).
|
||||
then(teamIds => this.isMemberById(username, teamIds)).
|
||||
catch(() => false);
|
||||
}
|
||||
}
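
For reference, a minimal usage sketch of the `GithubTeams` helper in the variant that receives a `GithubApi` instance; the token, org name, username and team slugs below are placeholders, not values from this repo:

```ts
import {GithubApi} from './github-api';
import {GithubTeams} from './github-teams';

async function checkTrusted(): Promise<void> {
  // Token and org are assumptions for illustration only.
  const api = new GithubApi(process.env.GITHUB_TOKEN || '');
  const teams = new GithubTeams(api, 'some-org');

  // Resolves to `true` if the user is an active member of at least one of the teams.
  const isTrusted = await teams.isMemberBySlug('some-user', ['team-a', 'team-b']);
  console.log(`Trusted: ${isTrusted}`);
}

checkTrusted();
```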
|
||||
|
@ -1,14 +1,14 @@
|
||||
// We can't use `import...from` here, because of the following mess:
|
||||
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
|
||||
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
|
||||
//
|
||||
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
// `jasmine-core` module, not the `jasmine` module).
|
||||
import Jasmine = require('jasmine');
|
||||
import 'source-map-support/register';
|
||||
|
||||
export const runTests = (specFiles: string[]) => {
|
||||
export const runTests = (specFiles: string[], helpers?: string[]) => {
|
||||
// We can't use `import` here, because of the following mess:
|
||||
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
|
||||
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
|
||||
//
|
||||
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
// `jasmine-core` module, not the `jasmine` module).
|
||||
// tslint:disable-next-line: no-var-requires variable-name
|
||||
const Jasmine = require('jasmine');
|
||||
const config = {
|
||||
helpers,
|
||||
random: true,
|
||||
spec_files: specFiles,
|
||||
stopSpecOnExpectationFailure: true,
|
||||
@ -16,7 +16,7 @@ export const runTests = (specFiles: string[]) => {
|
||||
|
||||
process.on('unhandledRejection', (reason: any) => console.log('Unhandled rejection:', reason));
|
||||
|
||||
const runner = new Jasmine({});
|
||||
const runner = new Jasmine();
|
||||
runner.loadConfig(config);
|
||||
runner.onComplete((passed: boolean) => process.exit(passed ? 0 : 1));
|
||||
runner.execute();
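
A hypothetical call site for `runTests`; the module path and glob patterns are assumptions, and the two-argument form matches the newer signature that also loads helper files:

```ts
// Module path and globs are placeholders for illustration.
import {runTests} from './run-tests';

// Helpers (e.g. custom matchers) are loaded before the spec files.
runTests(['dist/**/*.spec.js'], ['dist/**/*.helper.js']);
```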
|
||||
|
@ -1,98 +1,17 @@
|
||||
import {basename, resolve as resolvePath} from 'path';
|
||||
import {SHORT_SHA_LEN} from './constants';
|
||||
|
||||
/**
|
||||
* Shorten a SHA to make it more readable
|
||||
* @param sha The SHA to shorten.
|
||||
*/
|
||||
export function computeShortSha(sha: string) {
|
||||
return sha.substr(0, SHORT_SHA_LEN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the path for a downloaded artifact file.
|
||||
* @param downloadsDir The directory where artifacts are downloaded
|
||||
* @param pr The PR associated with this artifact.
|
||||
* @param sha The SHA associated with the build for this artifact.
|
||||
* @param artifactPath The path to the artifact on CircleCI.
|
||||
* @returns The fully resolved location for the specified downloaded artifact.
|
||||
*/
|
||||
export function computeArtifactDownloadPath(downloadsDir: string, pr: number, sha: string, artifactPath: string) {
|
||||
return resolvePath(downloadsDir, `${pr}-${computeShortSha(sha)}-${basename(artifactPath)}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the PR number and latest commit SHA from a downloaded file path.
|
||||
* @param downloadPath the path to the downloaded file.
|
||||
* @returns An object whose keys are the PR and SHA extracted from the file path.
|
||||
*/
|
||||
export function getPrInfoFromDownloadPath(downloadPath: string) {
|
||||
const file = basename(downloadPath);
|
||||
const [pr, sha] = file.split('-');
|
||||
return {pr: +pr, sha};
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a value is true.
|
||||
* @param value The value to assert.
|
||||
* @param message The message if the value is not true.
|
||||
*/
|
||||
export function assert(value: boolean, message: string) {
|
||||
if (!value) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a parameter is not equal to "".
|
||||
* @param name The name of the parameter.
|
||||
* @param value The value of the parameter.
|
||||
*/
|
||||
// Functions
|
||||
export const assertNotMissingOrEmpty = (name: string, value: string | null | undefined) => {
|
||||
assert(!!value, `Missing or empty required parameter '${name}'!`);
|
||||
if (!value) {
|
||||
throw new Error(`Missing or empty required parameter '${name}'!`);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get an environment variable.
|
||||
* @param name The name of the environment variable.
|
||||
* @param isOptional True if the variable is optional.
|
||||
* @returns The value of the variable or "" if it is optional and falsy.
|
||||
* @throws `Error` if the variable is falsy and not optional.
|
||||
*/
|
||||
export const getEnvVar = (name: string, isOptional = false): string => {
|
||||
const value = process.env[name];
|
||||
|
||||
if (!isOptional && !value) {
|
||||
try {
|
||||
throw new Error(`ERROR: Missing required environment variable '${name}'!`);
|
||||
} catch (error) {
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
console.error(`ERROR: Missing required environment variable '${name}'!`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return value || '';
|
||||
};
|
||||
|
||||
/**
|
||||
* A basic logger implementation.
|
||||
* Delegates to `console`, but prepends each message with the current date and the specified scope (i.e. the caller).
|
||||
*/
|
||||
export class Logger {
|
||||
private padding = ' '.repeat(20 - this.scope.length);
|
||||
|
||||
/**
|
||||
* Create a new `Logger` instance for the specified `scope`.
|
||||
* @param scope The logger's scope (added to all messages).
|
||||
*/
|
||||
constructor(private scope: string) {}
|
||||
|
||||
public error(...args: any[]) { this.callMethod('error', args); }
|
||||
public info(...args: any[]) { this.callMethod('info', args); }
|
||||
public log(...args: any[]) { this.callMethod('log', args); }
|
||||
public warn(...args: any[]) { this.callMethod('warn', args); }
|
||||
|
||||
private callMethod(method: 'error' | 'info' | 'log' | 'warn', args: any[]) {
|
||||
console[method](`[${new Date()}]`, `${this.scope}:${this.padding}`, ...args);
|
||||
}
|
||||
}
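
To illustrate how the helpers above fit together (the directory, PR number and SHA are made up), the path produced by `computeArtifactDownloadPath` can be parsed back with `getPrInfoFromDownloadPath`, and `Logger` prefixes every message with a timestamp and scope:

```ts
import {computeArtifactDownloadPath, getPrInfoFromDownloadPath, Logger} from './utils';

const logger = new Logger('example');

// '/tmp/downloads', the PR number, the SHA and the artifact path are placeholders.
const downloadPath = computeArtifactDownloadPath(
    '/tmp/downloads', 12345, '9fa1b3e7c2d40a58e6f0b17c3d2a4e5f6a7b8c9d', 'aio/aio-snapshot.tgz');

logger.info('Artifact stored at:', downloadPath);
logger.info('Recovered PR info:', getPrInfoFromDownloadPath(downloadPath));  // {pr: 12345, sha: '<short SHA>'}
```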
|
||||
|
@ -1,83 +0,0 @@
|
||||
import * as fs from 'fs';
|
||||
import fetch from 'node-fetch';
|
||||
import {dirname} from 'path';
|
||||
import {mkdir} from 'shelljs';
|
||||
import {promisify} from 'util';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {assert, assertNotMissingOrEmpty, computeArtifactDownloadPath, Logger} from '../common/utils';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
export interface GithubInfo {
|
||||
org: string;
|
||||
pr: number;
|
||||
repo: string;
|
||||
sha: string;
|
||||
success: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper that can get information about builds and download build artifacts.
|
||||
*/
|
||||
export class BuildRetriever {
|
||||
private logger = new Logger('BuildRetriever');
|
||||
constructor(private api: CircleCiApi, private downloadSizeLimit: number, private downloadDir: string) {
|
||||
assert(downloadSizeLimit > 0, 'Invalid parameter "downloadSizeLimit" should be a number greater than 0.');
|
||||
assertNotMissingOrEmpty('downloadDir', downloadDir);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get GitHub information about a build
|
||||
* @param buildNum The number of the build for which to retrieve the info.
|
||||
* @returns The Github org, repo, PR and latest SHA for the specified build.
|
||||
*/
|
||||
public async getGithubInfo(buildNum: number): Promise<GithubInfo> {
|
||||
const buildInfo = await this.api.getBuildInfo(buildNum);
|
||||
const githubInfo: GithubInfo = {
|
||||
org: buildInfo.username,
|
||||
pr: getPrFromBranch(buildInfo.branch),
|
||||
repo: buildInfo.reponame,
|
||||
sha: buildInfo.vcs_revision,
|
||||
success: !buildInfo.failed,
|
||||
};
|
||||
return githubInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a request to the given URL for a build artifact and store it locally.
|
||||
* @param buildNum the number of the CircleCI build whose artifact we want to download.
|
||||
* @param pr the number of the PR that triggered the CircleCI build.
|
||||
* @param sha the commit in the PR that triggered the CircleCI build.
|
||||
* @param artifactPath the path on CircleCI where the artifact was stored.
|
||||
* @returns A promise to the file path where the downloaded file was stored.
|
||||
*/
|
||||
public async downloadBuildArtifact(buildNum: number, pr: number, sha: string, artifactPath: string): Promise<string> {
|
||||
try {
|
||||
const outPath = computeArtifactDownloadPath(this.downloadDir, pr, sha, artifactPath);
|
||||
const downloadExists = await new Promise(resolve => fs.exists(outPath, exists => resolve(exists)));
|
||||
if (!downloadExists) {
|
||||
const url = await this.api.getBuildArtifactUrl(buildNum, artifactPath);
|
||||
const response = await fetch(url, {size: this.downloadSizeLimit});
|
||||
if (response.status !== 200) {
|
||||
throw new PreviewServerError(response.status, `Error ${response.status} - ${response.statusText}`);
|
||||
}
|
||||
const buffer = await response.buffer();
|
||||
mkdir('-p', dirname(outPath));
|
||||
await promisify(fs.writeFile)(outPath, buffer);
|
||||
}
|
||||
return outPath;
|
||||
} catch (error) {
|
||||
this.logger.warn(error);
|
||||
const status = (error.type === 'max-size') ? 413 : 500;
|
||||
throw new PreviewServerError(status, `CircleCI artifact download failed (${error.message || error})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getPrFromBranch(branch: string): number {
|
||||
// CircleCI only exposes PR numbers via the `branch` field :-(
|
||||
const match = /^pull\/(\d+)$/.exec(branch);
|
||||
if (!match) {
|
||||
throw new Error(`No PR found in branch field: ${branch}`);
|
||||
}
|
||||
return +match[1];
|
||||
}
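
A sketch of how `BuildRetriever` is typically driven from a webhook handler; the org, repo, token, size limit, download directory and artifact path are placeholders:

```ts
import {CircleCiApi} from '../common/circle-ci-api';
import {BuildRetriever} from './build-retriever';

async function fetchArtifact(buildNum: number): Promise<string> {
  const circleCiApi = new CircleCiApi('some-org', 'some-repo', 'circle-ci-token');
  const retriever = new BuildRetriever(circleCiApi, 100 * 1024 * 1024, '/tmp/downloads');

  const {pr, sha, success} = await retriever.getGithubInfo(buildNum);
  if (!success) {
    throw new Error(`Build ${buildNum} did not succeed; skipping artifact download.`);
  }

  // Resolves to the local path where the artifact was stored (or already existed).
  return retriever.downloadBuildArtifact(buildNum, pr, sha, 'aio/aio-snapshot.tgz');
}
```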
|
@ -1,46 +0,0 @@
|
||||
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
|
||||
/**
|
||||
* A helper to verify whether builds are trusted.
|
||||
*/
|
||||
export class BuildVerifier {
|
||||
/**
|
||||
* Construct a new BuildVerifier instance.
|
||||
* @param prs A helper to access PR information.
|
||||
* @param teams A helper to access Github team information.
|
||||
* @param allowedTeamSlugs The teams that are trusted.
|
||||
* @param trustedPrLabel The github label that indicates that a PR is trusted.
|
||||
*/
|
||||
constructor(protected prs: GithubPullRequests, protected teams: GithubTeams,
|
||||
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
|
||||
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
|
||||
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR contains files that are significant to the build.
|
||||
* @param pr The number of the PR to check
|
||||
* @param significantFilePattern A regex that selects files that are significant.
|
||||
*/
|
||||
public async getSignificantFilesChanged(pr: number, significantFilePattern: RegExp): Promise<boolean> {
|
||||
const files = await this.prs.fetchFiles(pr);
|
||||
return files.some(file => significantFilePattern.test(file.filename));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a PR is trusted.
|
||||
* @param pr The number of the PR to check.
|
||||
* @returns true if the PR is trusted.
|
||||
*/
|
||||
public async getPrIsTrusted(pr: number): Promise<boolean> {
|
||||
const prInfo = await this.prs.fetch(pr);
|
||||
return this.hasLabel(prInfo, this.trustedPrLabel) ||
|
||||
(await this.teams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
|
||||
}
|
||||
|
||||
protected hasLabel(prInfo: PullRequest, label: string): boolean {
|
||||
return prInfo.labels.some(labelObj => labelObj.name === label);
|
||||
}
|
||||
}
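
For context, wiring up the preview-server `BuildVerifier` might look roughly like this; the token, org, repo, team slugs and label are stand-ins, not values taken from the diff:

```ts
import {GithubApi} from '../common/github-api';
import {GithubPullRequests} from '../common/github-pull-requests';
import {GithubTeams} from '../common/github-teams';
import {BuildVerifier} from './build-verifier';

const api = new GithubApi('github-token');
const prs = new GithubPullRequests(api, 'some-org', 'some-repo');
const teams = new GithubTeams(api, 'some-org');
const verifier = new BuildVerifier(prs, teams, ['trusted-team'], 'trusted: pr-label');

// `true` if the PR carries the trusted label or its author is an active member of an allowed team.
verifier.getPrIsTrusted(12345).then(isTrusted => console.log(isTrusted));
```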
|
@ -1,41 +0,0 @@
|
||||
// Imports
|
||||
import {AIO_DOWNLOADS_DIR} from '../common/constants';
|
||||
import {
|
||||
AIO_ARTIFACT_MAX_SIZE,
|
||||
AIO_ARTIFACT_PATH,
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_CIRCLE_CI_TOKEN,
|
||||
AIO_DOMAIN_NAME,
|
||||
AIO_GITHUB_ORGANIZATION,
|
||||
AIO_GITHUB_REPO,
|
||||
AIO_GITHUB_TEAM_SLUGS,
|
||||
AIO_GITHUB_TOKEN,
|
||||
AIO_PREVIEW_SERVER_HOSTNAME,
|
||||
AIO_PREVIEW_SERVER_PORT,
|
||||
AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
AIO_TRUSTED_PR_LABEL,
|
||||
} from '../common/env-variables';
|
||||
import {PreviewServerFactory} from './preview-server-factory';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main(): void {
|
||||
PreviewServerFactory
|
||||
.create({
|
||||
buildArtifactPath: AIO_ARTIFACT_PATH,
|
||||
buildsDir: AIO_BUILDS_DIR,
|
||||
circleCiToken: AIO_CIRCLE_CI_TOKEN,
|
||||
domainName: AIO_DOMAIN_NAME,
|
||||
downloadSizeLimit: AIO_ARTIFACT_MAX_SIZE,
|
||||
downloadsDir: AIO_DOWNLOADS_DIR,
|
||||
githubOrg: AIO_GITHUB_ORGANIZATION,
|
||||
githubRepo: AIO_GITHUB_REPO,
|
||||
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
|
||||
githubToken: AIO_GITHUB_TOKEN,
|
||||
significantFilesPattern: AIO_SIGNIFICANT_FILES_PATTERN,
|
||||
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
|
||||
})
|
||||
.listen(AIO_PREVIEW_SERVER_PORT, AIO_PREVIEW_SERVER_HOSTNAME);
|
||||
}
|
@@ -1,8 +0,0 @@
// Classes
export class PreviewServerError extends Error {
  // Constructor
  constructor(public status: number = 500, message?: string) {
    super(message);
    Object.setPrototypeOf(this, PreviewServerError.prototype);
  }
}
@ -1,213 +0,0 @@
|
||||
// Imports
|
||||
import * as bodyParser from 'body-parser';
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {AddressInfo} from 'net';
|
||||
import {CircleCiApi} from '../common/circle-ci-api';
|
||||
import {GithubApi} from '../common/github-api';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assert, assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {BuildCreator} from './build-creator';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {BuildRetriever} from './build-retriever';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
import {respondWithError, throwRequestError} from './utils';
|
||||
|
||||
const AIO_PREVIEW_JOB = 'aio_preview';
|
||||
|
||||
// Interfaces - Types
|
||||
export interface PreviewServerConfig {
|
||||
downloadsDir: string;
|
||||
downloadSizeLimit: number;
|
||||
buildArtifactPath: string;
|
||||
buildsDir: string;
|
||||
domainName: string;
|
||||
githubOrg: string;
|
||||
githubRepo: string;
|
||||
githubTeamSlugs: string[];
|
||||
circleCiToken: string;
|
||||
githubToken: string;
|
||||
significantFilesPattern: string;
|
||||
trustedPrLabel: string;
|
||||
}
|
||||
|
||||
const logger = new Logger('PreviewServer');
|
||||
|
||||
// Classes
|
||||
export class PreviewServerFactory {
|
||||
// Methods - Public
|
||||
public static create(cfg: PreviewServerConfig): http.Server {
|
||||
assertNotMissingOrEmpty('domainName', cfg.domainName);
|
||||
|
||||
const circleCiApi = new CircleCiApi(cfg.githubOrg, cfg.githubRepo, cfg.circleCiToken);
|
||||
const githubApi = new GithubApi(cfg.githubToken);
|
||||
const prs = new GithubPullRequests(githubApi, cfg.githubOrg, cfg.githubRepo);
|
||||
const teams = new GithubTeams(githubApi, cfg.githubOrg);
|
||||
|
||||
const buildRetriever = new BuildRetriever(circleCiApi, cfg.downloadSizeLimit, cfg.downloadsDir);
|
||||
const buildVerifier = new BuildVerifier(prs, teams, cfg.githubTeamSlugs, cfg.trustedPrLabel);
|
||||
const buildCreator = PreviewServerFactory.createBuildCreator(prs, cfg.buildsDir, cfg.domainName);
|
||||
|
||||
const middleware = PreviewServerFactory.createMiddleware(buildRetriever, buildVerifier, buildCreator, cfg);
|
||||
const httpServer = http.createServer(middleware as any);
|
||||
|
||||
httpServer.on('listening', () => {
|
||||
const info = httpServer.address() as AddressInfo;
|
||||
logger.info(`Up and running (and listening on ${info.address}:${info.port})...`);
|
||||
});
|
||||
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
public static createMiddleware(buildRetriever: BuildRetriever, buildVerifier: BuildVerifier,
|
||||
buildCreator: BuildCreator, cfg: PreviewServerConfig): express.Express {
|
||||
const middleware = express();
|
||||
const jsonParser = bodyParser.json();
|
||||
const significantFilesRe = new RegExp(cfg.significantFilesPattern);
|
||||
|
||||
// RESPOND TO IS-ALIVE PING
|
||||
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
|
||||
|
||||
// RESPOND TO CAN-HAVE-PUBLIC-PREVIEW CHECK
|
||||
const canHavePublicPreviewRe = /^\/can-have-public-preview\/(\d+)\/?$/;
|
||||
middleware.get(canHavePublicPreviewRe, async (req, res) => {
|
||||
try {
|
||||
const pr = +canHavePublicPreviewRe.exec(req.url)![1];
|
||||
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
// Cannot have preview: PR did not touch relevant files: `aio/` or `packages/` (except for spec files).
|
||||
res.send({canHavePublicPreview: false, reason: 'No significant files touched.'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because it did not touch any significant files.`);
|
||||
} else if (!await buildVerifier.getPrIsTrusted(pr)) {
|
||||
// Cannot have preview: PR not automatically verifiable as "trusted".
|
||||
res.send({canHavePublicPreview: false, reason: 'Not automatically verifiable as "trusted".'});
|
||||
logger.log(`PR:${pr} - Cannot have a public preview, because not automatically verifiable as "trusted".`);
|
||||
} else {
|
||||
// Can have preview.
|
||||
res.send({canHavePublicPreview: true, reason: null});
|
||||
logger.log(`PR:${pr} - Can have a public preview.`);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Previewability check error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// CIRCLE_CI BUILD COMPLETE WEBHOOK
|
||||
middleware.post(/^\/circle-build\/?$/, jsonParser, async (req, res) => {
|
||||
try {
|
||||
if (!(
|
||||
req.is('json') &&
|
||||
req.body &&
|
||||
req.body.payload &&
|
||||
req.body.payload.build_num > 0 &&
|
||||
req.body.payload.build_parameters &&
|
||||
req.body.payload.build_parameters.CIRCLE_JOB
|
||||
)) {
|
||||
throwRequestError(400, `Incorrect body content. Expected JSON`, req);
|
||||
}
|
||||
|
||||
const job = req.body.payload.build_parameters.CIRCLE_JOB;
|
||||
const buildNum = req.body.payload.build_num;
|
||||
|
||||
logger.log(`Build:${buildNum}, Job:${job} - processing web-hook trigger`);
|
||||
|
||||
if (job !== AIO_PREVIEW_JOB) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`Build:${buildNum}, Job:${job} -`,
|
||||
`Skipping preview processing because this is not the "${AIO_PREVIEW_JOB}" job.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const { pr, sha, org, repo, success } = await buildRetriever.getGithubInfo(buildNum);
|
||||
|
||||
if (!success) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - Skipping preview processing because this build did not succeed.`);
|
||||
return;
|
||||
}
|
||||
|
||||
assert(cfg.githubOrg === org,
|
||||
`Invalid webhook: expected "githubOrg" property to equal "${cfg.githubOrg}" but got "${org}".`);
|
||||
assert(cfg.githubRepo === repo,
|
||||
`Invalid webhook: expected "githubRepo" property to equal "${cfg.githubRepo}" but got "${repo}".`);
|
||||
|
||||
// Do not deploy unless this PR has touched relevant files: `aio/` or `packages/` (except for spec files)
|
||||
if (!await buildVerifier.getSignificantFilesChanged(pr, significantFilesRe)) {
|
||||
res.sendStatus(204);
|
||||
logger.log(`PR:${pr}, Build:${buildNum} - ` +
|
||||
`Skipping preview processing because this PR did not touch any significant files.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const artifactPath = await buildRetriever.downloadBuildArtifact(buildNum, pr, sha, cfg.buildArtifactPath);
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(pr);
|
||||
await buildCreator.create(pr, sha, artifactPath, isPublic);
|
||||
|
||||
res.sendStatus(isPublic ? 201 : 202);
|
||||
logger.log(`PR:${pr}, SHA:${computeShortSha(sha)}, Build:${buildNum} - ` +
|
||||
`Successfully created ${isPublic ? 'public' : 'non-public'} preview.`);
|
||||
} catch (err) {
|
||||
logger.error('CircleCI webhook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// GITHUB PR UPDATED WEBHOOK
|
||||
middleware.post(/^\/pr-updated\/?$/, jsonParser, async (req, res) => {
|
||||
const { action, number: prNo }: { action?: string, number?: number } = req.body;
|
||||
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
|
||||
|
||||
try {
|
||||
if (!visMayHaveChanged) {
|
||||
res.sendStatus(200);
|
||||
} else if (!prNo) {
|
||||
throwRequestError(400, `Missing or empty 'number' field`, req);
|
||||
} else {
|
||||
const isPublic = await buildVerifier.getPrIsTrusted(prNo);
|
||||
await buildCreator.updatePrVisibility(prNo, isPublic);
|
||||
res.sendStatus(200);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('PR update hook error', err);
|
||||
respondWithError(res, err);
|
||||
}
|
||||
});
|
||||
|
||||
// ALL OTHER REQUESTS
|
||||
middleware.all('*', req => throwRequestError(404, 'Unknown resource', req));
|
||||
middleware.use((err: any, _req: any, res: express.Response, _next: any) => {
|
||||
const statusText = http.STATUS_CODES[err.status] || '???';
|
||||
logger.error(`Preview server error: ${err.status} - ${statusText}:`, err.message);
|
||||
respondWithError(res, err);
|
||||
});
|
||||
|
||||
return middleware;
|
||||
}
|
||||
|
||||
public static createBuildCreator(prs: GithubPullRequests, buildsDir: string, domainName: string): BuildCreator {
|
||||
const buildCreator = new BuildCreator(buildsDir);
|
||||
const postPreviewsComment = (pr: number, shas: string[]) => {
|
||||
const body = shas.
|
||||
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
|
||||
join('\n');
|
||||
|
||||
return prs.addComment(pr, body);
|
||||
};
|
||||
|
||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
|
||||
if (isPublic) {
|
||||
postPreviewsComment(pr, [sha]);
|
||||
}
|
||||
});
|
||||
|
||||
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
|
||||
if (isPublic && shas.length) {
|
||||
postPreviewsComment(pr, shas);
|
||||
}
|
||||
});
|
||||
|
||||
return buildCreator;
|
||||
}
|
||||
}
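
The `/circle-build` handler above only checks a few fields of the incoming JSON; a request body of roughly the following shape passes its validation (inferred from the handler's checks, not from a CircleCI specification):

```ts
// Shape inferred from the checks in the `/circle-build` handler above.
interface CircleBuildWebhookBody {
  payload: {
    build_num: number;                        // must be > 0
    build_parameters: {CIRCLE_JOB: string};   // must be 'aio_preview' to be processed further
  };
}

const exampleBody: CircleBuildWebhookBody = {
  payload: {
    build_num: 42,
    build_parameters: {CIRCLE_JOB: 'aio_preview'},
  },
};

console.log(JSON.stringify(exampleBody));
```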
|
@ -1,28 +0,0 @@
|
||||
import * as express from 'express';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
|
||||
/**
|
||||
* Update the response to report that an error has occurred.
|
||||
* @param res The response to configure as an error.
|
||||
* @param err The error that needs to be reported.
|
||||
*/
|
||||
export async function respondWithError(res: express.Response, err: any): Promise<void> {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, String((err && err.message) || err));
|
||||
}
|
||||
|
||||
res.status(err.status);
|
||||
return new Promise(resolve => res.end(err.message, resolve));
|
||||
}
|
||||
|
||||
/**
|
||||
* Throw an exception that describes the given error information.
|
||||
* @param status The HTTP status code to include in the error.
|
||||
* @param error The error message to include in the error.
|
||||
* @param req The request that triggered this error.
|
||||
*/
|
||||
export function throwRequestError(status: number, error: string, req: express.Request): never {
|
||||
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
|
||||
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
|
||||
throw new PreviewServerError(status, message);
|
||||
}
|
@ -4,16 +4,13 @@ import {EventEmitter} from 'events';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {assertNotMissingOrEmpty, computeShortSha, Logger} from '../common/utils';
|
||||
import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {PreviewServerError} from './preview-error';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Classes
|
||||
export class BuildCreator extends EventEmitter {
|
||||
|
||||
private logger = new Logger('BuildCreator');
|
||||
|
||||
// Constructor
|
||||
constructor(protected buildsDir: string) {
|
||||
super();
|
||||
@ -21,9 +18,9 @@ export class BuildCreator extends EventEmitter {
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public create(pr: number, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
|
||||
public create(pr: string, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
|
||||
// Use only part of the SHA for more readable URLs.
|
||||
sha = computeShortSha(sha);
|
||||
sha = sha.substr(0, SHORT_SHA_LEN);
|
||||
|
||||
const {newPrDir: prDir} = this.getCandidatePrDirs(pr, isPublic);
|
||||
const shaDir = path.join(prDir, sha);
|
||||
@ -36,7 +33,7 @@ export class BuildCreator extends EventEmitter {
|
||||
then(([prDirExisted, shaDirExisted]) => {
|
||||
if (shaDirExisted) {
|
||||
const publicOrNot = isPublic ? 'public' : 'non-public';
|
||||
throw new PreviewServerError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
throw new UploadError(409, `Request to overwrite existing ${publicOrNot} directory: ${shaDir}`);
|
||||
}
|
||||
|
||||
dirToRemoveOnError = prDirExisted ? shaDir : prDir;
|
||||
@ -52,15 +49,15 @@ export class BuildCreator extends EventEmitter {
|
||||
shell.rm('-rf', dirToRemoveOnError);
|
||||
}
|
||||
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while creating preview at: ${shaDir}\n${err}`);
|
||||
if (!(err instanceof UploadError)) {
|
||||
err = new UploadError(500, `Error while uploading to directory: ${shaDir}\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
public updatePrVisibility(pr: number, makePublic: boolean): Promise<boolean> {
|
||||
public updatePrVisibility(pr: string, makePublic: boolean): Promise<boolean> {
|
||||
const {oldPrDir: otherVisPrDir, newPrDir: targetVisPrDir} = this.getCandidatePrDirs(pr, makePublic);
|
||||
|
||||
return Promise.
|
||||
@ -71,8 +68,7 @@ export class BuildCreator extends EventEmitter {
|
||||
return false;
|
||||
} else if (targetVisPrDirExisted) {
|
||||
// Error: Directories for both visibilities exist.
|
||||
throw new PreviewServerError(409,
|
||||
`Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
|
||||
throw new UploadError(409, `Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
|
||||
}
|
||||
|
||||
// Visibility change: Moving `otherVisPrDir` to `targetVisPrDir`.
|
||||
@ -83,8 +79,8 @@ export class BuildCreator extends EventEmitter {
|
||||
then(() => true);
|
||||
}).
|
||||
catch(err => {
|
||||
if (!(err instanceof PreviewServerError)) {
|
||||
err = new PreviewServerError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
|
||||
if (!(err instanceof UploadError)) {
|
||||
err = new UploadError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
|
||||
}
|
||||
|
||||
throw err;
|
||||
@ -106,11 +102,12 @@ export class BuildCreator extends EventEmitter {
|
||||
}
|
||||
|
||||
if (stderr) {
|
||||
this.logger.warn(stderr);
|
||||
console.warn(stderr);
|
||||
}
|
||||
|
||||
try {
|
||||
shell.chmod('-R', 'a-w', outputDir);
|
||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||
(shell as any).chmod('-R', 'a-w', outputDir);
|
||||
shell.rm('-f', inputFile);
|
||||
resolve();
|
||||
} catch (err) {
|
||||
@ -120,9 +117,9 @@ export class BuildCreator extends EventEmitter {
|
||||
});
|
||||
}
|
||||
|
||||
protected getCandidatePrDirs(pr: number, isPublic: boolean): {oldPrDir: string, newPrDir: string} {
|
||||
protected getCandidatePrDirs(pr: string, isPublic: boolean) {
|
||||
const hiddenPrDir = path.join(this.buildsDir, HIDDEN_DIR_PREFIX + pr);
|
||||
const publicPrDir = path.join(this.buildsDir, `${pr}`);
|
||||
const publicPrDir = path.join(this.buildsDir, pr);
|
||||
|
||||
const oldPrDir = isPublic ? hiddenPrDir : publicPrDir;
|
||||
const newPrDir = isPublic ? publicPrDir : hiddenPrDir;
|
@ -0,0 +1,87 @@
|
||||
// Imports
|
||||
import * as jwt from 'jsonwebtoken';
|
||||
import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
|
||||
import {GithubTeams} from '../common/github-teams';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Interfaces - Types
|
||||
interface JwtPayload {
|
||||
slug: string;
|
||||
'pull-request': number;
|
||||
}
|
||||
|
||||
// Enums
|
||||
export enum BUILD_VERIFICATION_STATUS {
|
||||
verifiedAndTrusted,
|
||||
verifiedNotTrusted,
|
||||
}
|
||||
|
||||
// Classes
|
||||
export class BuildVerifier {
|
||||
// Properties - Protected
|
||||
protected githubPullRequests: GithubPullRequests;
|
||||
protected githubTeams: GithubTeams;
|
||||
|
||||
// Constructor
|
||||
constructor(protected secret: string, githubToken: string, protected repoSlug: string, organization: string,
|
||||
protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
|
||||
assertNotMissingOrEmpty('secret', secret);
|
||||
assertNotMissingOrEmpty('githubToken', githubToken);
|
||||
assertNotMissingOrEmpty('repoSlug', repoSlug);
|
||||
assertNotMissingOrEmpty('organization', organization);
|
||||
assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
|
||||
assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
|
||||
|
||||
this.githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
||||
this.githubTeams = new GithubTeams(githubToken, organization);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public getPrIsTrusted(pr: number): Promise<boolean> {
|
||||
return Promise.resolve().
|
||||
then(() => this.githubPullRequests.fetch(pr)).
|
||||
then(prInfo => this.hasLabel(prInfo, this.trustedPrLabel) ||
|
||||
this.githubTeams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
|
||||
}
|
||||
|
||||
public verify(expectedPr: number, authHeader: string): Promise<BUILD_VERIFICATION_STATUS> {
|
||||
return Promise.resolve().
|
||||
then(() => this.extractJwtString(authHeader)).
|
||||
then(jwtString => this.verifyJwt(expectedPr, jwtString)).
|
||||
then(jwtPayload => this.verifyPr(jwtPayload['pull-request'])).
|
||||
catch(err => { throw new UploadError(403, `Error while verifying upload for PR ${expectedPr}: ${err}`); });
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected extractJwtString(input: string): string {
|
||||
return input.replace(/^token +/i, '');
|
||||
}
|
||||
|
||||
protected hasLabel(prInfo: PullRequest, label: string) {
|
||||
return prInfo.labels.some(labelObj => labelObj.name === label);
|
||||
}
|
||||
|
||||
protected verifyJwt(expectedPr: number, token: string): Promise<JwtPayload> {
|
||||
return new Promise((resolve, reject) => {
|
||||
jwt.verify(token, this.secret, {issuer: 'Travis CI, GmbH'}, (err, payload: JwtPayload) => {
|
||||
if (err) {
|
||||
reject(err.message || err);
|
||||
} else if (payload.slug !== this.repoSlug) {
|
||||
reject(`jwt slug invalid. expected: ${this.repoSlug}`);
|
||||
} else if (payload['pull-request'] !== expectedPr) {
|
||||
reject(`jwt pull-request invalid. expected: ${expectedPr}`);
|
||||
} else {
|
||||
resolve(payload);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
protected verifyPr(pr: number): Promise<BUILD_VERIFICATION_STATUS> {
|
||||
return this.getPrIsTrusted(pr).
|
||||
then(isTrusted => Promise.resolve(isTrusted ?
|
||||
BUILD_VERIFICATION_STATUS.verifiedAndTrusted :
|
||||
BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
|
||||
}
|
||||
}
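
A rough illustration of the JWT-based verification flow above; the secret, tokens, slugs, label and the JWT value itself are placeholders:

```ts
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from './build-verifier';

const verifier = new BuildVerifier(
    'preview-deployment-secret', 'github-token', 'some-org/some-repo', 'some-org',
    ['trusted-team'], 'trusted: pr-label');

// `verify()` strips a leading 'token ' prefix and checks the JWT's repo slug and PR number.
verifier.verify(12345, 'token <jwt-from-travis>').
  then(status => console.log(status === BUILD_VERIFICATION_STATUS.verifiedAndTrusted ?
      'Public preview' : 'Non-public preview')).
  catch(err => console.error(err));
```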
|
@ -0,0 +1,39 @@
|
||||
// Imports
|
||||
import {getEnvVar} from '../common/utils';
|
||||
import {BuildVerifier} from './build-verifier';
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main() {
|
||||
const secret = 'unused';
|
||||
const githubToken = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
const repoSlug = getEnvVar('AIO_REPO_SLUG');
|
||||
const organization = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const allowedTeamSlugs = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
|
||||
const trustedPrLabel = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
const pr = +getEnvVar('AIO_PREVERIFY_PR');
|
||||
|
||||
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, organization, allowedTeamSlugs,
|
||||
trustedPrLabel);
|
||||
|
||||
// Exit codes:
|
||||
// - 0: The PR can be automatically trusted (i.e. author belongs to trusted team or PR has the "trusted PR" label).
|
||||
// - 1: An error occurred.
|
||||
// - 2: The PR cannot be automatically trusted.
|
||||
buildVerifier.getPrIsTrusted(pr).
|
||||
then(isTrusted => {
|
||||
if (!isTrusted) {
|
||||
console.warn(
|
||||
`The PR cannot be automatically verified, because it doesn't have the "${trustedPrLabel}" label and ` +
`the author is not an active member of any of the following teams: ${allowedTeamSlugs.join(', ')}`);
|
||||
}
|
||||
|
||||
process.exit(isTrusted ? 0 : 2);
|
||||
}).
|
||||
catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
@ -0,0 +1,34 @@
|
||||
// Imports
|
||||
import {getEnvVar} from '../common/utils';
|
||||
import {uploadServerFactory} from './upload-server-factory';
|
||||
|
||||
// Constants
|
||||
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
|
||||
const AIO_DOMAIN_NAME = getEnvVar('AIO_DOMAIN_NAME');
|
||||
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
|
||||
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
|
||||
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
|
||||
const AIO_PREVIEW_DEPLOYMENT_TOKEN = getEnvVar('AIO_PREVIEW_DEPLOYMENT_TOKEN');
|
||||
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
|
||||
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
|
||||
const AIO_UPLOAD_HOSTNAME = getEnvVar('AIO_UPLOAD_HOSTNAME');
|
||||
const AIO_UPLOAD_PORT = +getEnvVar('AIO_UPLOAD_PORT');
|
||||
|
||||
// Run
|
||||
_main();
|
||||
|
||||
// Functions
|
||||
function _main() {
|
||||
uploadServerFactory.
|
||||
create({
|
||||
buildsDir: AIO_BUILDS_DIR,
|
||||
domainName: AIO_DOMAIN_NAME,
|
||||
githubOrganization: AIO_GITHUB_ORGANIZATION,
|
||||
githubTeamSlugs: AIO_GITHUB_TEAM_SLUGS.split(','),
|
||||
githubToken: AIO_GITHUB_TOKEN,
|
||||
repoSlug: AIO_REPO_SLUG,
|
||||
secret: AIO_PREVIEW_DEPLOYMENT_TOKEN,
|
||||
trustedPrLabel: AIO_TRUSTED_PR_LABEL,
|
||||
}).
|
||||
listen(AIO_UPLOAD_PORT, AIO_UPLOAD_HOSTNAME);
|
||||
}
|
@@ -0,0 +1,8 @@
// Classes
export class UploadError extends Error {
  // Constructor
  constructor(public status: number = 500, message?: string) {
    super(message);
    Object.setPrototypeOf(this, UploadError.prototype);
  }
}
@ -0,0 +1,153 @@
|
||||
// Imports
|
||||
import * as bodyParser from 'body-parser';
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {GithubPullRequests} from '../common/github-pull-requests';
|
||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||
import {BuildCreator} from './build-creator';
|
||||
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from './build-verifier';
|
||||
import {UploadError} from './upload-error';
|
||||
|
||||
// Constants
|
||||
const AUTHORIZATION_HEADER = 'AUTHORIZATION';
|
||||
const X_FILE_HEADER = 'X-FILE';
|
||||
|
||||
// Interfaces - Types
|
||||
interface UploadServerConfig {
|
||||
buildsDir: string;
|
||||
domainName: string;
|
||||
githubOrganization: string;
|
||||
githubTeamSlugs: string[];
|
||||
githubToken: string;
|
||||
repoSlug: string;
|
||||
secret: string;
|
||||
trustedPrLabel: string;
|
||||
}
|
||||
|
||||
// Classes
|
||||
class UploadServerFactory {
|
||||
// Methods - Public
|
||||
public create({
|
||||
buildsDir,
|
||||
domainName,
|
||||
githubOrganization,
|
||||
githubTeamSlugs,
|
||||
githubToken,
|
||||
repoSlug,
|
||||
secret,
|
||||
trustedPrLabel,
|
||||
}: UploadServerConfig): http.Server {
|
||||
assertNotMissingOrEmpty('domainName', domainName);
|
||||
|
||||
const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, githubOrganization, githubTeamSlugs,
|
||||
trustedPrLabel);
|
||||
const buildCreator = this.createBuildCreator(buildsDir, githubToken, repoSlug, domainName);
|
||||
|
||||
const middleware = this.createMiddleware(buildVerifier, buildCreator);
|
||||
const httpServer = http.createServer(middleware as any);
|
||||
|
||||
httpServer.on('listening', () => {
|
||||
const info = httpServer.address();
|
||||
console.info(`Up and running (and listening on ${info.address}:${info.port})...`);
|
||||
});
|
||||
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
protected createBuildCreator(buildsDir: string, githubToken: string, repoSlug: string,
|
||||
domainName: string): BuildCreator {
|
||||
const buildCreator = new BuildCreator(buildsDir);
|
||||
const githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
||||
const postPreviewsComment = (pr: number, shas: string[]) => {
|
||||
const body = shas.
|
||||
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
|
||||
join('\n');
|
||||
|
||||
return githubPullRequests.addComment(pr, body);
|
||||
};
|
||||
|
||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
|
||||
if (isPublic) {
|
||||
postPreviewsComment(pr, [sha]);
|
||||
}
|
||||
});
|
||||
|
||||
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
|
||||
if (isPublic && shas.length) {
|
||||
postPreviewsComment(pr, shas);
|
||||
}
|
||||
});
|
||||
|
||||
return buildCreator;
|
||||
}
|
||||
|
||||
protected createMiddleware(buildVerifier: BuildVerifier, buildCreator: BuildCreator): express.Express {
|
||||
const middleware = express();
|
||||
const jsonParser = bodyParser.json();
|
||||
|
||||
middleware.get(/^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/, (req, res) => {
|
||||
const pr = req.params[0];
|
||||
const sha = req.params[1];
|
||||
const archive = req.header(X_FILE_HEADER);
|
||||
const authHeader = req.header(AUTHORIZATION_HEADER);
|
||||
|
||||
if (!authHeader) {
|
||||
this.throwRequestError(401, `Missing or empty '${AUTHORIZATION_HEADER}' header`, req);
|
||||
} else if (!archive) {
|
||||
this.throwRequestError(400, `Missing or empty '${X_FILE_HEADER}' header`, req);
|
||||
} else {
|
||||
Promise.resolve().
|
||||
then(() => buildVerifier.verify(+pr, authHeader)).
|
||||
then(verStatus => verStatus === BUILD_VERIFICATION_STATUS.verifiedAndTrusted).
|
||||
then(isPublic => buildCreator.create(pr, sha, archive, isPublic).
|
||||
then(() => res.sendStatus(isPublic ? 201 : 202))).
|
||||
catch(err => this.respondWithError(res, err));
|
||||
}
|
||||
});
|
||||
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
|
||||
middleware.post(/^\/pr-updated\/?$/, jsonParser, (req, res) => {
|
||||
const {action, number: prNo}: {action?: string, number?: number} = req.body;
|
||||
const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
|
||||
|
||||
if (!visMayHaveChanged) {
|
||||
res.sendStatus(200);
|
||||
} else if (!prNo) {
|
||||
this.throwRequestError(400, `Missing or empty 'number' field`, req);
|
||||
} else {
|
||||
Promise.resolve().
|
||||
then(() => buildVerifier.getPrIsTrusted(prNo)).
|
||||
then(isPublic => buildCreator.updatePrVisibility(String(prNo), isPublic)).
|
||||
then(() => res.sendStatus(200)).
|
||||
catch(err => this.respondWithError(res, err));
|
||||
}
|
||||
});
|
||||
middleware.all('*', req => this.throwRequestError(404, 'Unknown resource', req));
|
||||
middleware.use((err: any, _req: any, res: express.Response, _next: any) => this.respondWithError(res, err));
|
||||
|
||||
return middleware;
|
||||
}
|
||||
|
||||
protected respondWithError(res: express.Response, err: any) {
|
||||
if (!(err instanceof UploadError)) {
|
||||
err = new UploadError(500, String((err && err.message) || err));
|
||||
}
|
||||
|
||||
const statusText = http.STATUS_CODES[err.status] || '???';
|
||||
console.error(`Upload error: ${err.status} - ${statusText}`);
|
||||
console.error(err.message);
|
||||
|
||||
res.status(err.status).end(err.message);
|
||||
}
|
||||
|
||||
protected throwRequestError(status: number, error: string, req: express.Request) {
|
||||
const message = `${error} in request: ${req.method} ${req.originalUrl}` +
|
||||
(!req.body ? '' : ` ${JSON.stringify(req.body)}`);
|
||||
|
||||
throw new UploadError(status, message);
|
||||
}
|
||||
}
|
||||
|
||||
// Exports
|
||||
export const uploadServerFactory = new UploadServerFactory();
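
For illustration, a client request that matches the `/create-build` route above could be issued like this; the hostname, port, PR number, SHA and header values are placeholders, and the `X-FILE` value is assumed to be a path the server can read:

```ts
import * as http from 'http';

const options: http.RequestOptions = {
  headers: {
    'AUTHORIZATION': 'token <jwt-from-travis>',   // checked by BuildVerifier#verify()
    'X-FILE': '/tmp/snapshot.tgz',                // path to the already-uploaded archive
  },
  hostname: 'localhost',
  method: 'GET',
  path: `/create-build/12345/${'a'.repeat(40)}`,  // /create-build/<PR>/<40-char SHA>
  port: 4200,
};

http.request(options, res => console.log('Status:', res.statusCode)).end();
```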
|
@ -1,37 +1,16 @@
|
||||
export const enum BuildNums {
|
||||
BUILD_INFO_ERROR = 1,
|
||||
BUILD_INFO_404,
|
||||
BUILD_INFO_BUILD_FAILED,
|
||||
BUILD_INFO_INVALID_GH_ORG,
|
||||
BUILD_INFO_INVALID_GH_REPO,
|
||||
CHANGED_FILES_ERROR,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
BUILD_ARTIFACTS_ERROR,
|
||||
BUILD_ARTIFACTS_404,
|
||||
BUILD_ARTIFACTS_EMPTY,
|
||||
BUILD_ARTIFACTS_MISSING,
|
||||
DOWNLOAD_ARTIFACT_ERROR,
|
||||
DOWNLOAD_ARTIFACT_404,
|
||||
DOWNLOAD_ARTIFACT_TOO_BIG,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
// Using the values below, we can fake the response of the corresponding methods in tests. This is
|
||||
// necessary, because the test upload-server will be running as a separate node process, so we will
|
||||
// not have direct access to the code (e.g. for mocking).
|
||||
// (See also 'lib/verify-setup/start-test-upload-server.ts'.)
|
||||
|
||||
export const enum PrNums {
|
||||
CHANGED_FILES_ERROR = 1,
|
||||
CHANGED_FILES_404,
|
||||
CHANGED_FILES_NONE,
|
||||
TRUST_CHECK_ERROR,
|
||||
TRUST_CHECK_UNTRUSTED,
|
||||
TRUST_CHECK_TRUSTED_LABEL,
|
||||
TRUST_CHECK_ACTIVE_TRUSTED_USER,
|
||||
TRUST_CHECK_INACTIVE_TRUSTED_USER,
|
||||
}
|
||||
/* tslint:disable: variable-name */
|
||||
|
||||
export const SHA = '1234567890'.repeat(4);
|
||||
export const ALT_SHA = 'abcde'.repeat(8);
|
||||
export const SIMILAR_SHA = SHA.slice(0, -1) + 'A';
|
||||
// Special values to be used as `authHeader` in `BuildVerifier#verify()`.
|
||||
export const BV_verify_error = 'FAKE_VERIFICATION_ERROR';
|
||||
export const BV_verify_verifiedNotTrusted = 'FAKE_VERIFIED_NOT_TRUSTED';
|
||||
|
||||
// Special values to be used as `pr` in `BuildVerifier#getPrIsTrusted()`.
|
||||
export const BV_getPrIsTrusted_error = 32203;
|
||||
export const BV_getPrIsTrusted_notTrusted = 72457;
|
||||
|
||||
/* tslint:enable: variable-name */
|
||||
|
@ -1,10 +0,0 @@
|
||||
declare module 'delete-empty' {
  interface Options {
    dryRun: boolean;
    verbose: boolean;
    filter: (filePath: string) => boolean;
  }
  export default function deleteEmpty(cwd: string, options?: Options): Promise<string[]>;
  export default function deleteEmpty(cwd: string, options?: Options, callback?: (err: any, deleted: string[]) => void): void;
  export function sync(cwd: string, options?: Options): string[];
}
|
@ -1,16 +1,21 @@
|
||||
// Imports
|
||||
import * as cp from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as http from 'http';
|
||||
import * as path from 'path';
|
||||
import * as shell from 'shelljs';
|
||||
import {AIO_DOWNLOADS_DIR, HIDDEN_DIR_PREFIX} from '../common/constants';
|
||||
import {
|
||||
AIO_BUILDS_DIR,
|
||||
AIO_NGINX_PORT_HTTP,
|
||||
AIO_NGINX_PORT_HTTPS,
|
||||
AIO_WWW_USER,
|
||||
} from '../common/env-variables';
|
||||
import {computeShortSha, Logger} from '../common/utils';
|
||||
import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
|
||||
import {getEnvVar} from '../common/utils';
|
||||
|
||||
// Constants
|
||||
const TEST_AIO_BUILDS_DIR = getEnvVar('TEST_AIO_BUILDS_DIR');
|
||||
const TEST_AIO_NGINX_HOSTNAME = getEnvVar('TEST_AIO_NGINX_HOSTNAME');
|
||||
const TEST_AIO_NGINX_PORT_HTTP = +getEnvVar('TEST_AIO_NGINX_PORT_HTTP');
|
||||
const TEST_AIO_NGINX_PORT_HTTPS = +getEnvVar('TEST_AIO_NGINX_PORT_HTTPS');
|
||||
const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
|
||||
const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
|
||||
const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
|
||||
const WWW_USER = getEnvVar('AIO_WWW_USER');
|
||||
|
||||
// Interfaces - Types
|
||||
export interface CmdResult { success: boolean; err: Error | null; stdout: string; stderr: string; }
|
||||
@ -22,50 +27,61 @@ export type VerifyCmdResultFn = (result: CmdResult) => void;
|
||||
|
||||
// Classes
|
||||
class Helper {
|
||||
// Properties - Public
|
||||
public get buildsDir() { return TEST_AIO_BUILDS_DIR; }
|
||||
public get nginxHostname() { return TEST_AIO_NGINX_HOSTNAME; }
|
||||
public get nginxPortHttp() { return TEST_AIO_NGINX_PORT_HTTP; }
|
||||
public get nginxPortHttps() { return TEST_AIO_NGINX_PORT_HTTPS; }
|
||||
public get uploadHostname() { return TEST_AIO_UPLOAD_HOSTNAME; }
|
||||
public get uploadPort() { return TEST_AIO_UPLOAD_PORT; }
|
||||
public get uploadMaxSize() { return TEST_AIO_UPLOAD_MAX_SIZE; }
|
||||
public get wwwUser() { return WWW_USER; }
|
||||
|
||||
// Properties - Protected
|
||||
protected cleanUpFns: CleanUpFn[] = [];
|
||||
protected portPerScheme: {[scheme: string]: number} = {
|
||||
http: AIO_NGINX_PORT_HTTP,
|
||||
https: AIO_NGINX_PORT_HTTPS,
|
||||
http: this.nginxPortHttp,
|
||||
https: this.nginxPortHttps,
|
||||
};
|
||||
|
||||
private logger = new Logger('TestHelper');
|
||||
|
||||
// Constructor
|
||||
constructor() {
|
||||
shell.mkdir('-p', AIO_BUILDS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_BUILDS_DIR}`);
|
||||
shell.mkdir('-p', AIO_DOWNLOADS_DIR);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${AIO_DOWNLOADS_DIR}`);
|
||||
shell.mkdir('-p', this.buildsDir);
|
||||
shell.exec(`chown -R ${this.wwwUser} ${this.buildsDir}`);
|
||||
}
|
||||
|
||||
// Methods - Public
|
||||
public cleanUp(): void {
|
||||
public buildExists(pr: string, sha = '', isPublic = true, legacy = false): boolean {
|
||||
const prDir = this.getPrDir(pr, isPublic);
|
||||
const dir = !sha ? prDir : this.getShaDir(prDir, sha, legacy);
|
||||
return fs.existsSync(dir);
|
||||
}
|
||||
|
||||
public cleanUp() {
|
||||
while (this.cleanUpFns.length) {
|
||||
// Clean-up fns remove themselves from the list.
|
||||
this.cleanUpFns[0]();
|
||||
}
|
||||
|
||||
const leftoverDownloads = fs.readdirSync(AIO_DOWNLOADS_DIR);
|
||||
const leftoverBuilds = fs.readdirSync(AIO_BUILDS_DIR);
|
||||
|
||||
if (leftoverDownloads.length) {
|
||||
this.logger.log(`Downloads directory '${AIO_DOWNLOADS_DIR}' is not empty after clean-up.`, leftoverDownloads);
|
||||
shell.rm('-rf', `${AIO_DOWNLOADS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length) {
|
||||
this.logger.log(`Builds directory '${AIO_BUILDS_DIR}' is not empty after clean-up.`, leftoverBuilds);
|
||||
shell.rm('-rf', `${AIO_BUILDS_DIR}/*`);
|
||||
}
|
||||
|
||||
if (leftoverBuilds.length || leftoverDownloads.length) {
|
||||
throw new Error(`Unexpected test files not cleaned up.`);
|
||||
if (fs.readdirSync(this.buildsDir).length) {
|
||||
throw new Error(`Directory '${this.buildsDir}' is not empty after clean-up.`);
|
||||
}
|
||||
}
|
||||
|
||||
public createDummyBuild(pr: number, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
|
||||
public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
|
||||
const inputDir = this.getShaDir(this.getPrDir(`uploaded/${pr}`, true), sha);
|
||||
const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
|
||||
const cmd2 = `chown ${this.wwwUser} ${archivePath}`;
|
||||
|
||||
const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true, true);
|
||||
shell.exec(cmd1);
|
||||
shell.exec(cmd2);
|
||||
cleanUpTemp();
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
|
||||
}
|
||||
|
||||
public createDummyBuild(pr: string, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
|
||||
const prDir = this.getPrDir(pr, isPublic);
|
||||
const shaDir = this.getShaDir(prDir, sha, legacy);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
@ -73,38 +89,63 @@ class Helper {
|
||||
|
||||
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
||||
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
||||
shell.exec(`chown -R ${AIO_WWW_USER} ${prDir}`);
|
||||
shell.exec(`chown -R ${this.wwwUser} ${prDir}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
||||
}
|
||||
|
||||
public getPrDir(pr: number, isPublic: boolean): string {
|
||||
const prDirName = isPublic ? '' + pr : HIDDEN_DIR_PREFIX + pr;
|
||||
return path.join(AIO_BUILDS_DIR, prDirName);
|
||||
public deletePrDir(pr: string, isPublic = true) {
|
||||
const prDir = this.getPrDir(pr, isPublic);
|
||||
|
||||
if (fs.existsSync(prDir)) {
|
||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||
(shell as any).chmod('-R', 'a+w', prDir);
|
||||
shell.rm('-rf', prDir);
|
||||
}
|
||||
}
|
||||
|
||||
public getPrDir(pr: string, isPublic: boolean): string {
|
||||
const prDirName = isPublic ? pr : HIDDEN_DIR_PREFIX + pr;
|
||||
return path.join(this.buildsDir, prDirName);
|
||||
}
|
||||
|
||||
public getShaDir(prDir: string, sha: string, legacy = false): string {
|
||||
return path.join(prDir, legacy ? sha : computeShortSha(sha));
|
||||
return path.join(prDir, legacy ? sha : this.getShordSha(sha));
|
||||
}
|
||||
|
||||
public readBuildFile(pr: number, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
|
||||
public getShordSha(sha: string): string {
|
||||
return sha.substr(0, SHORT_SHA_LEN);
|
||||
}
|
||||
|
||||
public readBuildFile(pr: string, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
return fs.readFileSync(absFilePath, 'utf8');
|
||||
}
|
||||
|
||||
public runCmd(cmd: string, opts: cp.ExecOptions = {}): Promise<CmdResult> {
|
||||
public runCmd(cmd: string, opts: cp.ExecFileOptions = {}): Promise<CmdResult> {
|
||||
return new Promise(resolve => {
|
||||
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
|
||||
this.createCleanUpFn(() => proc.kill());
|
||||
});
|
||||
}
|
||||
|
||||
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory): void {
|
||||
Object.entries(this.portPerScheme).forEach(([scheme, port]) => suiteFactory(scheme, port));
|
||||
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory) {
|
||||
Object.keys(this.portPerScheme).forEach(scheme => suiteFactory(scheme, this.portPerScheme[scheme]));
|
||||
}
|
||||
|
||||
public verifyResponse(status: number, regex: string | RegExp = /^/): VerifyCmdResultFn {
|
||||
public verifyResponse(status: number | [number, string], regex = /^/): VerifyCmdResultFn {
|
||||
let statusCode: number;
|
||||
let statusText: string;
|
||||
|
||||
if (Array.isArray(status)) {
|
||||
statusCode = status[0];
|
||||
statusText = status[1];
|
||||
} else {
|
||||
statusCode = status;
|
||||
statusText = http.STATUS_CODES[statusCode] || 'UNKNOWN_STATUS_CODE';
|
||||
}
|
||||
|
||||
return (result: CmdResult) => {
|
||||
const [headers, body] = result.stdout.
|
||||
split(/(?:\r?\n){2,}/).
|
||||
@ -113,25 +154,25 @@ class Helper {
|
||||
// Only keep the last two sections (final headers and body).
|
||||
|
||||
if (!result.success) {
|
||||
this.logger.log('Stdout:', result.stdout);
|
||||
this.logger.error('Stderr:', result.stderr);
|
||||
this.logger.error('Error:', result.err);
|
||||
console.log('Stdout:', result.stdout);
|
||||
console.log('Stderr:', result.stderr);
|
||||
console.log('Error:', result.err);
|
||||
}
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(headers).toMatch(new RegExp(`HTTP/(?:1\\.1|2) ${status} `));
|
||||
expect(headers).toContain(`${statusCode} ${statusText}`);
|
||||
expect(body).toMatch(regex);
|
||||
};
|
||||
}
|
||||
|
||||
public writeBuildFile(pr: number, sha: string, relFilePath: string, content: string, isPublic = true,
|
||||
legacy = false): void {
|
||||
public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string, isPublic = true,
|
||||
legacy = false): CleanUpFn {
|
||||
const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
|
||||
const absFilePath = path.join(shaDir, relFilePath);
|
||||
this.writeFile(absFilePath, {content}, true);
|
||||
return this.writeFile(absFilePath, {content}, true);
|
||||
}
|
||||
|
||||
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): void {
|
||||
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): CleanUpFn {
|
||||
if (!force && fs.existsSync(filePath)) {
|
||||
throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
|
||||
}
|
||||
@ -149,7 +190,9 @@ class Helper {
|
||||
// Create a file with the specified content.
|
||||
fs.writeFileSync(filePath, content || '');
|
||||
}
|
||||
shell.exec(`chown ${AIO_WWW_USER} ${filePath}`);
|
||||
shell.exec(`chown ${this.wwwUser} ${filePath}`);
|
||||
|
||||
return this.createCleanUpFn(() => shell.rm('-rf', cleanUpTarget));
|
||||
}
|
||||
|
||||
// Methods - Protected
|
||||
@ -168,70 +211,5 @@ class Helper {
|
||||
}
|
||||
}
|
||||
|
||||
interface DefaultCurlOptions {
|
||||
defaultMethod?: CurlOptions['method'];
|
||||
defaultOptions?: CurlOptions['options'];
|
||||
defaultHeaders?: CurlOptions['headers'];
|
||||
defaultData?: CurlOptions['data'];
|
||||
defaultExtraPath?: CurlOptions['extraPath'];
|
||||
}
|
||||
|
||||
interface CurlOptions {
|
||||
method?: string;
|
||||
options?: string;
|
||||
headers?: string[];
|
||||
data?: any;
|
||||
url?: string;
|
||||
extraPath?: string;
|
||||
}
|
||||
|
||||
export function makeCurl(baseUrl: string, {
|
||||
defaultMethod = 'POST',
|
||||
defaultOptions = '',
|
||||
defaultHeaders = ['Content-Type: application/json'],
|
||||
defaultData = {},
|
||||
defaultExtraPath = '',
|
||||
}: DefaultCurlOptions = {}) {
|
||||
return function curl({
|
||||
method = defaultMethod,
|
||||
options = defaultOptions,
|
||||
headers = defaultHeaders,
|
||||
data = defaultData,
|
||||
url = baseUrl,
|
||||
extraPath = defaultExtraPath,
|
||||
}: CurlOptions) {
|
||||
const dataString = data ? JSON.stringify(data) : '';
|
||||
const cmd = `curl -iLX ${method} ` +
|
||||
`${options} ` +
|
||||
headers.map(header => `--header "${header}" `).join('') +
|
||||
`--data '${dataString}' ` +
|
||||
`${url}${extraPath}`;
|
||||
return helper.runCmd(cmd);
|
||||
};
|
||||
}
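
An example of using `makeCurl` (the base URL and payload are placeholders): build a reusable wrapper for one endpoint and post JSON through it.

```ts
import {makeCurl} from './helper';

// Wrapper bound to a (placeholder) pr-updated endpoint; defaults to POST with a JSON Content-Type header.
const curlPrUpdated = makeCurl('http://localhost:80/pr-updated');

curlPrUpdated({data: {action: 'labeled', number: 42}}).
  then(result => console.log(result.success, result.stdout));
```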
|
||||
|
||||
export interface PayloadData {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: number,
|
||||
build_parameters: {
|
||||
CIRCLE_JOB: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export function payload(buildNum: number): PayloadData {
|
||||
return {
|
||||
data: {
|
||||
payload: {
|
||||
build_num: buildNum,
|
||||
build_parameters: { CIRCLE_JOB: 'aio_preview' },
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// Exports
|
||||
export const helper = new Helper();
|
||||
|
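For reference, a minimal sketch (not part of the change set above) of how `makeCurl`, `payload` and the exported `helper` are meant to compose in a spec. The `/circle-build` path and the env-variable constants mirror the preview-server spec further below; the expected status code is only an illustrative assumption.

import {AIO_PREVIEW_SERVER_HOSTNAME, AIO_PREVIEW_SERVER_PORT} from '../common/env-variables';
import {BuildNums} from './constants';
import {helper, makeCurl, payload} from './helper';

// Bind a curl wrapper to the preview server's CircleCI webhook endpoint.
const host = `http://${AIO_PREVIEW_SERVER_HOSTNAME}:${AIO_PREVIEW_SERVER_PORT}`;
const curl = makeCurl(`${host}/circle-build`);

it('creates a preview for a trusted build', async () => {
  // POST the CircleCI-style payload and assert on the combined curl output.
  await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER))
      .then(helper.verifyResponse(201));
});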
@ -1,7 +0,0 @@
declare module jasmine {
interface Matchers {
toExistAsAFile(remove?: boolean): boolean;
toExistAsABuild(remove?: boolean): boolean;
toExistAsAnArtifact(remove?: boolean): boolean;
}
}
@ -1,88 +0,0 @@
import {sync as deleteEmpty} from 'delete-empty';
import {existsSync, unlinkSync} from 'fs';
import {join} from 'path';
import {AIO_DOWNLOADS_DIR} from '../common/constants';
import {computeShortSha} from '../common/utils';
import {SHA} from './constants';
import {helper} from './helper';

function checkFile(filePath: string, remove: boolean): boolean {
const exists = existsSync(filePath);
if (exists && remove) {
// if we expected the file to exist then we remove it to prevent leftover file errors
unlinkSync(filePath);
}
return exists;
}

function getArtifactPath(prNum: number, sha: string = SHA): string {
return `${AIO_DOWNLOADS_DIR}/${prNum}-${computeShortSha(sha)}-aio-snapshot.tgz`;
}

function checkFiles(prNum: number, isPublic: boolean, sha: string, isLegacy: boolean, remove: boolean) {
const files = ['/index.html', '/foo/bar.js'];
const prPath = helper.getPrDir(prNum, isPublic);
const shaPath = helper.getShaDir(prPath, sha, isLegacy);

const existingFiles: string[] = [];
const missingFiles: string[] = [];
files
.map(file => join(shaPath, file))
.forEach(file => (checkFile(file, remove) ? existingFiles : missingFiles).push(file));

deleteEmpty(prPath);

return { existingFiles, missingFiles };
}

class ToExistAsAFile implements jasmine.CustomMatcher {
public compare(actual: string, remove = true): jasmine.CustomMatcherResult {
const pass = checkFile(actual, remove);
return {
message: `Expected file at "${actual}" ${pass ? 'not' : ''} to exist`,
pass,
};
}
}

class ToExistAsAnArtifact implements jasmine.CustomMatcher {
public compare(actual: {prNum: number, sha?: string}, remove = true): jasmine.CustomMatcherResult {
const { prNum, sha = SHA } = actual;
const filePath = getArtifactPath(prNum, sha);
const pass = checkFile(filePath, remove);
return {
message: `Expected artifact "PR:${prNum}, SHA:${sha}, FILE:${filePath}" ${pass ? 'not' : '\b'} to exist`,
pass,
};
}
}

class ToExistAsABuild implements jasmine.CustomMatcher {
public compare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}, remove = true):
jasmine.CustomMatcherResult {
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
const {missingFiles} = checkFiles(prNum, isPublic, sha, isLegacy, remove);
return {
message: `Expected files for build "PR:${prNum}, SHA:${sha}" to exist:\n` +
missingFiles.map(file => ` - ${file}`).join('\n'),
pass: missingFiles.length === 0,
};
}
public negativeCompare(actual: {prNum: number, isPublic?: boolean, sha?: string, isLegacy?: boolean}):
jasmine.CustomMatcherResult {
const {prNum, isPublic = true, sha = SHA, isLegacy = false} = actual;
const { existingFiles } = checkFiles(prNum, isPublic, sha, isLegacy, false);
return {
message: `Expected files for build "PR:${prNum}, SHA:${sha}" not to exist:\n` +
existingFiles.map(file => ` - ${file}`).join('\n'),
pass: existingFiles.length === 0,
};
}

}

export const customMatchers = {
toExistAsABuild: () => new ToExistAsABuild(),
toExistAsAFile: () => new ToExistAsAFile(),
toExistAsAnArtifact: () => new ToExistAsAnArtifact(),
};
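As an aside, a small sketch of how these matchers are registered and used; the nginx and preview-server specs below follow the same pattern. The PR number and SHA here are arbitrary example values, not taken from the diff.

import {customMatchers} from './jasmine-custom-matchers';

describe('preview builds', () => {
  // Register the matchers so `expect(...)` knows about them in this spec.
  beforeEach(() => jasmine.addMatchers(customMatchers));

  it('leaves the extracted build on disk', () => {
    // Checks the expected files for the given PR/SHA and removes them afterwards.
    expect({prNum: 42, sha: '1'.repeat(40)}).toExistAsABuild();
  });
});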
@ -1,171 +0,0 @@
/* tslint:disable:max-line-length */
import * as nock from 'nock';
import * as tar from 'tar-stream';
import {gzipSync} from 'zlib';
import {getEnvVar, Logger} from '../common/utils';
import {BuildNums, PrNums, SHA} from './constants';

// We are using the `nock` library to fake responses to REST requests when testing.
// This is necessary, because the test preview-server runs as a separate node process to
// the test harness, so we do not have direct access to the code (e.g. for mocking).
// (See also 'lib/verify-setup/start-test-preview-server.ts'.)

// Each of the potential requests to an external API (e.g. Github or CircleCI) is mocked
// below and returns a suitable response. This is quite complicated to set up, since the
// response from, say, CircleCI will affect what request is made to, say, Github.

const logger = new Logger('mock-external-apis');

const log = (...args: any[]) => {
// Filter out non-matching URL checks
if (!/^matching.+: false$/.test(args[0])) {
logger.log(...args);
}
};

const AIO_CIRCLE_CI_TOKEN = getEnvVar('AIO_CIRCLE_CI_TOKEN');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');

const AIO_ARTIFACT_PATH = getEnvVar('AIO_ARTIFACT_PATH');
const AIO_GITHUB_ORGANIZATION = getEnvVar('AIO_GITHUB_ORGANIZATION');
const AIO_GITHUB_REPO = getEnvVar('AIO_GITHUB_REPO');
const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');

const ACTIVE_TRUSTED_USER = 'active-trusted-user';
const INACTIVE_TRUSTED_USER = 'inactive-trusted-user';
const UNTRUSTED_USER = 'untrusted-user';

const BASIC_BUILD_INFO = {
branch: `pull/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
failed: false,
reponame: AIO_GITHUB_REPO,
username: AIO_GITHUB_ORGANIZATION,
vcs_revision: SHA,
};

const ISSUE_INFO_TRUSTED_LABEL = { labels: [{ name: AIO_TRUSTED_PR_LABEL }], user: { login: UNTRUSTED_USER } };
const ISSUE_INFO_ACTIVE_TRUSTED_USER = { labels: [], user: { login: ACTIVE_TRUSTED_USER } };
const ISSUE_INFO_INACTIVE_TRUSTED_USER = { labels: [], user: { login: INACTIVE_TRUSTED_USER } };
const ISSUE_INFO_UNTRUSTED = { labels: [], user: { login: UNTRUSTED_USER } };
const ACTIVE_STATE = { state: 'active' };
const INACTIVE_STATE = { state: 'inactive' };

const TEST_TEAM_INFO = AIO_GITHUB_TEAM_SLUGS.map((slug, index) => ({ slug, id: index }));

const CIRCLE_CI_API_HOST = 'https://circleci.com';
const CIRCLE_CI_TOKEN_PARAM = `circle-token=${AIO_CIRCLE_CI_TOKEN}`;
const ARTIFACT_1 = { path: 'artifact-1', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-1`, _urlPath: '/artifacts/artifact-1' };
const ARTIFACT_2 = { path: 'artifact-2', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-2`, _urlPath: '/artifacts/artifact-2' };
const ARTIFACT_3 = { path: 'artifact-3', url: `${CIRCLE_CI_API_HOST}/artifacts/artifact-3`, _urlPath: '/artifacts/artifact-3' };
const ARTIFACT_ERROR = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/error`, _urlPath: '/artifacts/error' };
const ARTIFACT_404 = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/404`, _urlPath: '/artifacts/404' };
const ARTIFACT_VALID_TRUSTED_USER = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/user`, _urlPath: '/artifacts/valid/user' };
const ARTIFACT_VALID_TRUSTED_LABEL = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/label`, _urlPath: '/artifacts/valid/label' };
const ARTIFACT_VALID_UNTRUSTED = { path: AIO_ARTIFACT_PATH, url: `${CIRCLE_CI_API_HOST}/artifacts/valid/untrusted`, _urlPath: '/artifacts/valid/untrusted' };

const CIRCLE_CI_BUILD_INFO_URL = `/api/v1.1/project/github/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}`;

const buildInfoUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}?${CIRCLE_CI_TOKEN_PARAM}`;
const buildArtifactsUrl = (buildNum: number) => `${CIRCLE_CI_BUILD_INFO_URL}/${buildNum}/artifacts?${CIRCLE_CI_TOKEN_PARAM}`;
const buildInfo = (prNum: number) => ({ ...BASIC_BUILD_INFO, branch: `pull/${prNum}` });

const GITHUB_API_HOST = 'https://api.github.com';
const GITHUB_ISSUES_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/issues`;
const GITHUB_PULLS_URL = `/repos/${AIO_GITHUB_ORGANIZATION}/${AIO_GITHUB_REPO}/pulls`;
const GITHUB_TEAMS_URL = `/orgs/${AIO_GITHUB_ORGANIZATION}/teams`;

const getIssueUrl = (prNum: number) => `${GITHUB_ISSUES_URL}/${prNum}`;
const getFilesUrl = (prNum: number, pageNum = 1) => `${GITHUB_PULLS_URL}/${prNum}/files?page=${pageNum}&per_page=100`;
const getCommentUrl = (prNum: number) => `${getIssueUrl(prNum)}/comments`;
const getTeamMembershipUrl = (teamId: number, username: string) => `/teams/${teamId}/memberships/${username}`;

const createArchive = (buildNum: number, prNum: number, sha: string) => {
logger.log('createArchive', buildNum, prNum, sha);
const pack = tar.pack();
pack.entry({name: 'index.html'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /index.html`);
pack.entry({name: 'foo/bar.js'}, `BUILD: ${buildNum} | PR: ${prNum} | SHA: ${sha} | File: /foo/bar.js`);
pack.finalize();
const zip = gzipSync(pack.read());
return zip;
};

// Create request scopes
const circleCiApi = nock(CIRCLE_CI_API_HOST).log(log).persist();
const githubApi = nock(GITHUB_API_HOST).log(log).persist().matchHeader('Authorization', `token ${AIO_GITHUB_TOKEN}`);

//////////////////////////////

// GENERAL responses
githubApi.get(GITHUB_TEAMS_URL + '?page=1&per_page=100').reply(200, TEST_TEAM_INFO);
githubApi.post(getCommentUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200);

// BUILD_INFO errors
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_ERROR)).replyWithError('BUILD_INFO_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_404)).reply(404, 'BUILD_INFO_404');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_BUILD_FAILED)).reply(200, { ...BASIC_BUILD_INFO, failed: true });
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_ORG)).reply(200, { ...BASIC_BUILD_INFO, username: 'bad' });
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_INFO_INVALID_GH_REPO)).reply(200, { ...BASIC_BUILD_INFO, reponame: 'bad' });

// CHANGED FILE errors
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_ERROR)).reply(200, buildInfo(PrNums.CHANGED_FILES_ERROR));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_ERROR)).replyWithError('CHANGED_FILES_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_404)).reply(200, buildInfo(PrNums.CHANGED_FILES_404));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_404)).reply(404, 'CHANGED_FILES_404');
circleCiApi.get(buildInfoUrl(BuildNums.CHANGED_FILES_NONE)).reply(200, buildInfo(PrNums.CHANGED_FILES_NONE));
githubApi.get(getFilesUrl(PrNums.CHANGED_FILES_NONE)).reply(200, []);

// ARTIFACT URL errors
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_ERROR)).replyWithError('BUILD_ARTIFACTS_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_404)).reply(404, 'BUILD_ARTIFACTS_ERROR');
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_EMPTY)).reply(200, []);
circleCiApi.get(buildInfoUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.BUILD_ARTIFACTS_MISSING)).reply(200, [ARTIFACT_1, ARTIFACT_2, ARTIFACT_3]);

// ARTIFACT DOWNLOAD errors
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).reply(200, [ARTIFACT_ERROR]);
circleCiApi.get(ARTIFACT_ERROR._urlPath).replyWithError(ARTIFACT_ERROR._urlPath);
circleCiApi.get(buildInfoUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, buildInfo(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER));
circleCiApi.get(buildArtifactsUrl(BuildNums.DOWNLOAD_ARTIFACT_404)).reply(200, [ARTIFACT_404]);
circleCiApi.get(ARTIFACT_ERROR._urlPath).reply(404, ARTIFACT_ERROR._urlPath);

// TRUST CHECK errors
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, buildInfo(PrNums.TRUST_CHECK_ERROR));
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ERROR)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ERROR)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ERROR)).replyWithError('TRUST_CHECK_ERROR');

// ACTIVE TRUSTED USER response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
circleCiApi.get(ARTIFACT_VALID_TRUSTED_USER._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_ACTIVE_TRUSTED_USER);
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);

// TRUSTED LABEL response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, [ARTIFACT_VALID_TRUSTED_LABEL]);
circleCiApi.get(ARTIFACT_VALID_TRUSTED_LABEL._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_TRUSTED_LABEL, PrNums.TRUST_CHECK_TRUSTED_LABEL, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_TRUSTED_LABEL)).reply(200, ISSUE_INFO_TRUSTED_LABEL);
githubApi.get(getTeamMembershipUrl(0, ACTIVE_TRUSTED_USER)).reply(200, ACTIVE_STATE);

// INACTIVE TRUSTED USER response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, BASIC_BUILD_INFO);
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, [ARTIFACT_VALID_TRUSTED_USER]);
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER)).reply(200, ISSUE_INFO_INACTIVE_TRUSTED_USER);
githubApi.get(getTeamMembershipUrl(0, INACTIVE_TRUSTED_USER)).reply(200, INACTIVE_STATE);

// UNTRUSTED response
circleCiApi.get(buildInfoUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, buildInfo(PrNums.TRUST_CHECK_UNTRUSTED));
githubApi.get(getFilesUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, [{ filename: 'aio/a' }]);
circleCiApi.get(buildArtifactsUrl(BuildNums.TRUST_CHECK_UNTRUSTED)).reply(200, [ARTIFACT_VALID_UNTRUSTED]);
circleCiApi.get(ARTIFACT_VALID_UNTRUSTED._urlPath).reply(200, createArchive(BuildNums.TRUST_CHECK_UNTRUSTED, PrNums.TRUST_CHECK_UNTRUSTED, SHA));
githubApi.get(getIssueUrl(PrNums.TRUST_CHECK_UNTRUSTED)).reply(200, ISSUE_INFO_UNTRUSTED);
githubApi.get(getTeamMembershipUrl(0, UNTRUSTED_USER)).reply(404);
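To make the interception pattern used throughout the file above concrete, here is a stand-alone sketch (not part of the change set) of how a nock scope answers a matching request made from the same process; the host, path and reply body are illustrative placeholders only.

import * as https from 'https';
import * as nock from 'nock';

// Any GET to this host/path from this process now receives the canned reply;
// `persist()` keeps the interceptor alive for subsequent matching requests.
nock('https://circleci.example.org')
    .persist()
    .get('/api/v1.1/me')
    .reply(200, {login: 'aio-preview-bot'});

https.get('https://circleci.example.org/api/v1.1/me', res => {
  console.log(res.statusCode);  // 200, served by nock without touching the network
});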
@ -1,49 +1,44 @@
|
||||
// Imports
|
||||
import * as path from 'path';
|
||||
import {rm} from 'shelljs';
|
||||
import {AIO_BUILDS_DIR, AIO_NGINX_HOSTNAME, AIO_NGINX_PORT_HTTP, AIO_NGINX_PORT_HTTPS} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {PrNums} from './constants';
|
||||
import {helper as h} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe(`nginx`, () => {
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
it('should redirect HTTP to HTTPS', async () => {
|
||||
const httpHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTP}`;
|
||||
const httpsHost = `${AIO_NGINX_HOSTNAME}:${AIO_NGINX_PORT_HTTPS}`;
|
||||
it('should redirect HTTP to HTTPS', done => {
|
||||
const httpHost = `${h.nginxHostname}:${h.nginxPortHttp}`;
|
||||
const httpsHost = `${h.nginxHostname}:${h.nginxPortHttps}`;
|
||||
const urlMap = {
|
||||
[`http://${httpHost}/`]: `https://${httpsHost}/`,
|
||||
[`http://${httpHost}/foo`]: `https://${httpsHost}/foo`,
|
||||
[`http://foo.${httpHost}/`]: `https://foo.${httpsHost}/`,
|
||||
};
|
||||
|
||||
const verifyRedirection = async (fromUrl: string, toUrl: string) => {
|
||||
const result = await h.runCmd(`curl -i ${fromUrl}`);
|
||||
const verifyRedirection = (httpUrl: string) => h.runCmd(`curl -i ${httpUrl}`).then(result => {
|
||||
h.verifyResponse(307)(result);
|
||||
|
||||
const headers = result.stdout.split(/(?:\r?\n){2,}/)[0];
|
||||
expect(headers).toContain(`Location: ${toUrl}`);
|
||||
};
|
||||
expect(headers).toContain(`Location: ${urlMap[httpUrl]}`);
|
||||
});
|
||||
|
||||
await Promise.all(Object.entries(urlMap).map(urls => verifyRedirection(...urls)));
|
||||
Promise.
|
||||
all(Object.keys(urlMap).map(verifyRedirection)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`(on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = AIO_NGINX_HOSTNAME;
|
||||
const hostname = h.nginxHostname;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = 9;
|
||||
const pr = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
const shortSha9 = computeShortSha(sha9);
|
||||
const shortSha0 = computeShortSha(sha0);
|
||||
const shortSha9 = h.getShordSha(sha9);
|
||||
const shortSha0 = h.getShordSha(sha0);
|
||||
|
||||
|
||||
describe(`pr<pr>-<sha>.${host}/*`, () => {
|
||||
@ -55,100 +50,95 @@ describe(`nginx`, () => {
|
||||
h.createDummyBuild(pr, sha0);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
expect({ prNum: pr, sha: sha9 }).toExistAsABuild();
|
||||
expect({ prNum: pr, sha: sha0 }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html', async () => {
|
||||
it('should return /index.html', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /index.html (for legacy builds)', async () => {
|
||||
it('should return /index.html (for legacy builds)', done => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js', async () => {
|
||||
it('should return /foo/bar.js', done => {
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex));
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should return /foo/bar.js (for legacy builds)', async () => {
|
||||
it('should return /foo/bar.js (for legacy builds)', done => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(pr, sha9, true, false, true);
|
||||
|
||||
await h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(h.verifyResponse(200, bodyRegex));
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isLegacy: true }).toExistAsABuild();
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).
|
||||
then(h.verifyResponse(200, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 403 for directories', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 403 for directories', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/`).then(h.verifyResponse(403)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo`).then(h.verifyResponse(403)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths to files', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404));
|
||||
it('should respond with 404 for unknown paths to files', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', async () => {
|
||||
it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${origin}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown PRs/SHAs', async () => {
|
||||
it('should respond with 404 for unknown PRs/SHAs', done => {
|
||||
const otherPr = 54321;
|
||||
const otherShortSha = computeShortSha('8'.repeat(40));
|
||||
const otherShortSha = h.getShordSha('8'.repeat(40));
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}9.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherShortSha}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the subdomain format is wrong', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 404 if the subdomain format is wrong', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://prx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://xx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
@ -157,25 +147,26 @@ describe(`nginx`, () => {
|
||||
h.runCmd(`curl -iL ${scheme}://${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}_${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', async () => {
|
||||
await h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404));
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', async () => {
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const bodyRegex9 = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
|
||||
const bodyRegex0 = new RegExp(`^PR: ${pr} | SHA: ${sha0} | File: /index\\.html$`);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${shortSha9}.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -183,41 +174,39 @@ describe(`nginx`, () => {
|
||||
|
||||
describe('(for hidden builds)', () => {
|
||||
|
||||
it('should respond with 404 for any file or directory', async () => {
|
||||
it('should respond with 404 for any file or directory', done => {
|
||||
const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false);
|
||||
expect(h.buildExists(pr, sha9, false)).toBe(true);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false }).toExistAsABuild();
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for any file or directory (for legacy builds)', async () => {
|
||||
it('should respond with 404 for any file or directory (for legacy builds)', done => {
|
||||
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
|
||||
const assert404 = h.verifyResponse(404);
|
||||
|
||||
h.createDummyBuild(pr, sha9, false, false, true);
|
||||
expect(h.buildExists(pr, sha9, false, true)).toBe(true);
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
|
||||
h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
|
||||
]);
|
||||
|
||||
expect({ prNum: pr, sha: sha9, isPublic: false, isLegacy: true }).toExistAsABuild();
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -227,97 +216,102 @@ describe(`nginx`, () => {
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const baseUrl = `${scheme}://${host}/can-have-public-preview`;
|
||||
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
||||
|
||||
it('should disallow non-POST requests', done => {
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX POST ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${baseUrl}/42`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX GET ${baseUrl}/${PrNums.CHANGED_FILES_ERROR}`).
|
||||
then(h.verifyResponse(500, /CHANGED_FILES_ERROR/));
|
||||
it(`should reject files larger than ${h.uploadMaxSize}B (according to header)`, done => {
|
||||
const headers = `--header "Content-Length: ${1.5 * h.uploadMaxSize}"`;
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
h.runCmd(`curl -iLX POST ${headers} ${url}`).
|
||||
then(h.verifyResponse([413, 'Request Entity Too Large'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const cmdPrefix = `curl -iLX GET ${baseUrl}`;
|
||||
it(`should reject files larger than ${h.uploadMaxSize}B (without header)`, done => {
|
||||
const filePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}-foo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}nfoo/42`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/42/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/f00`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
h.writeFile(filePath, {size: 1.5 * h.uploadMaxSize});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const url = `${scheme}://${host}/circle-build`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
h.runCmd(`curl -iLX POST --data-binary "@${filePath}" ${url}`).
|
||||
then(h.verifyResponse([413, 'Request Entity Too Large'])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST ${scheme}://${host}/circle-build`).
|
||||
then(h.verifyResponse(400, /Incorrect body content. Expected JSON/));
|
||||
it('should pass requests through to the upload server', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(401, /Missing or empty 'AUTHORIZATION' header/)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-circle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncircle-build/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build-foo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-buildnfoo/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/pr`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/circle-build/42`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/0${pr}/${sha9}`).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}/create-build/${pr}`;
|
||||
const bodyRegex = /Missing or empty 'AUTHORIZATION' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/0${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/${sha0}`).then(h.verifyResponse(401, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -327,33 +321,42 @@ describe(`nginx`, () => {
|
||||
const url = `${scheme}://${host}/pr-updated`;
|
||||
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405)),
|
||||
]);
|
||||
it('should disallow non-POST requests', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should pass requests through to the preview server', async () => {
|
||||
await h.runCmd(`curl -iLX POST --header "Content-Type: application/json" ${url}`).
|
||||
then(h.verifyResponse(400, /Missing or empty 'number' field/));
|
||||
it('should pass requests through to the upload server', done => {
|
||||
const cmdPrefix = `curl -iLX POST --header "Content-Type: application/json"`;
|
||||
|
||||
const cmd1 = `${cmdPrefix} ${url}`;
|
||||
const cmd2 = `${cmdPrefix} --data '{"number":${pr}}' ${url}`;
|
||||
const cmd3 = `${cmdPrefix} --data '{"number":${pr},"action":"foo"}' ${url}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(cmd1).then(h.verifyResponse(400, /Missing or empty 'number' field/)),
|
||||
h.runCmd(cmd2).then(h.verifyResponse(200)),
|
||||
h.runCmd(cmd3).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
|
||||
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -361,15 +364,13 @@ describe(`nginx`, () => {
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
beforeEach(() => {
|
||||
it('should respond with 404 for unknown URLs (even if the resource exists)', done => {
|
||||
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
const absFilePath = path.join(h.buildsDir, relFilePath);
|
||||
h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond with 404 for unknown URLs (even if the resource exists)', async () => {
|
||||
await Promise.all([
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
|
||||
@ -378,14 +379,7 @@ describe(`nginx`, () => {
|
||||
h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
['index.html', 'foo.js', 'foo/index.html', 'foo'].forEach(relFilePath => {
|
||||
const absFilePath = path.join(AIO_BUILDS_DIR, relFilePath);
|
||||
rm('-r', absFilePath);
|
||||
});
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
@ -1,568 +0,0 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import {join} from 'path';
|
||||
import {AIO_PREVIEW_SERVER_HOSTNAME, AIO_PREVIEW_SERVER_PORT, AIO_WWW_USER} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {ALT_SHA, BuildNums, PrNums, SHA, SIMILAR_SHA} from './constants';
|
||||
import {helper as h, makeCurl, payload} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
|
||||
// Tests
|
||||
describe('preview-server', () => {
|
||||
const hostname = AIO_PREVIEW_SERVER_HOSTNAME;
|
||||
const port = AIO_PREVIEW_SERVER_PORT;
|
||||
const host = `http://${hostname}:${port}`;
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000);
|
||||
beforeEach(() => jasmine.addMatchers(customMatchers));
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe(`${host}/can-have-public-preview`, () => {
|
||||
const curl = makeCurl(`${host}/can-have-public-preview`, {
|
||||
defaultData: null,
|
||||
defaultExtraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`,
|
||||
defaultHeaders: [],
|
||||
defaultMethod: 'GET',
|
||||
});
|
||||
|
||||
|
||||
it('should disallow non-GET requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'POST'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `-foo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `nfoo/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}/foo`}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/f00'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({extraPath: '/'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking for significant file changes fails', async () => {
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_404}`}).then(h.verifyResponse(500, /CHANGED_FILES_404/)),
|
||||
curl({extraPath: `/${PrNums.CHANGED_FILES_ERROR}`}).then(h.verifyResponse(500, /CHANGED_FILES_ERROR/)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if no significant files were touched', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'No significant files touched.',
|
||||
});
|
||||
|
||||
await curl({extraPath: `/${PrNums.CHANGED_FILES_NONE}`}).then(h.verifyResponse(200, expectedResponse));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 500 if checking "trusted" status fails', async () => {
|
||||
await curl({extraPath: `/${PrNums.TRUST_CHECK_ERROR}`}).then(h.verifyResponse(500, 'TRUST_CHECK_ERROR'));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (false) if the PR is not automatically verifiable as "trusted"', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: false,
|
||||
reason: 'Not automatically verifiable as \\"trusted\\".',
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_INACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_UNTRUSTED}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 200 (true) if the PR can have a public preview', async () => {
|
||||
const expectedResponse = JSON.stringify({
|
||||
canHavePublicPreview: true,
|
||||
reason: null,
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
curl({extraPath: `/${PrNums.TRUST_CHECK_TRUSTED_LABEL}`}).then(h.verifyResponse(200, expectedResponse)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/circle-build`, () => {
|
||||
const curl = makeCurl(`${host}/circle-build`);
|
||||
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
await Promise.all([
|
||||
curl({url: `${host}/foo/circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/foo-circle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/fooncircle-build`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build-foo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-buildnfoo`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build/pr`}).then(h.verifyResponse(404)),
|
||||
curl({url: `${host}/circle-build42`}).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 400 if the body is not valid', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: {} } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1 } } }).then(h.verifyResponse(400)),
|
||||
curl({ data: { payload: { build_num: 1, build_parameters: {} } } }).then(h.verifyResponse(400)),
|
||||
curl(payload(0)).then(h.verifyResponse(400)),
|
||||
curl(payload(-1)).then(h.verifyResponse(400)),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI API request errors', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_INFO_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if the build on CircleCI failed', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_BUILD_FAILED)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github org from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_ORG)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github repo from CircleCI does not match what is configured', async () => {
|
||||
await curl(payload(BuildNums.BUILD_INFO_INVALID_GH_REPO)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the github files API errors', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.CHANGED_FILES_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 204 if no significant files are changed by the PR', async () => {
|
||||
await curl(payload(BuildNums.CHANGED_FILES_NONE)).then(h.verifyResponse(204));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the CircleCI artifact API fails', async () => {
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_404)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_EMPTY)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.BUILD_ARTIFACTS_MISSING)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if fetching the artifact errors', async () => {
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_ERROR)).then(h.verifyResponse(500));
|
||||
await curl(payload(BuildNums.DOWNLOAD_ARTIFACT_404)).then(h.verifyResponse(500));
|
||||
});
|
||||
|
||||
it('should respond with 500 if the GH trusted API fails', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it('should respond with 201 if a new public build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER)).then(h.verifyResponse(201));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should respond with 202 if a new private build is created', async () => {
|
||||
await curl(payload(BuildNums.TRUST_CHECK_UNTRUSTED)).then(h.verifyResponse(202));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_UNTRUSTED, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
[true, false].forEach(isPublic => {
|
||||
const build = isPublic ? BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const prNum = isPublic ? PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER : PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
const label = isPublic ? 'public' : 'non-public';
|
||||
const overwriteRe = RegExp(`^Request to overwrite existing ${label} directory`);
|
||||
const statusCode = isPublic ? 201 : 202;
|
||||
|
||||
describe(`for ${label} builds`, () => {
|
||||
|
||||
it('should extract the contents of the build artifact', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /index.html`);
|
||||
expect(h.readBuildFile(prNum, SHA, 'foo/bar.js', isPublic))
|
||||
.toContain(`PR: ${prNum} | SHA: ${SHA} | File: /foo/bar.js`);
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should create files/directories owned by '${AIO_WWW_USER}'`, async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
const { stdout: allFiles } = await h.runCmd(`find ${shaDir}`);
|
||||
const { stdout: userFiles } = await h.runCmd(`find ${shaDir} -user ${AIO_WWW_USER}`);
|
||||
|
||||
expect(userFiles).toBe(allFiles);
|
||||
expect(userFiles).toContain(shaDir);
|
||||
expect(userFiles).toContain(join(shaDir, 'index.html'));
|
||||
expect(userFiles).toContain(join(shaDir, 'foo', 'bar.js'));
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should delete the build artifact file', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, SHA }).not.toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should make the build directory non-writable', async () => {
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||
const isNotWritable = (fileOrDir: string) => {
|
||||
const mode = fs.statSync(fileOrDir).mode;
|
||||
// tslint:disable-next-line: no-bitwise
|
||||
return !(mode & parseInt('222', 8));
|
||||
};
|
||||
|
||||
const shaDir = h.getShaDir(h.getPrDir(prNum, isPublic), SHA);
|
||||
expect(isNotWritable(shaDir)).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'index.html'))).toBe(true);
|
||||
expect(isNotWritable(join(shaDir, 'foo', 'bar.js'))).toBe(true);
|
||||
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)',
|
||||
async () => {
|
||||
// It is possible that 40-chars long build directories exist, if they had been deployed
|
||||
// before implementing the shorter build directory names. In that case, we don't want the
|
||||
// second (shorter) name to be considered the same as the old one (even if they originate
|
||||
// from the same SHA).
|
||||
|
||||
h.createDummyBuild(prNum, SHA, isPublic, false, true);
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic, true);
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
await curl(payload(build))
|
||||
.then(h.verifyResponse(statusCode));
|
||||
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, false)).toContain('index.html');
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: false }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: SHA, isLegacy: true }).toExistAsABuild();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds`, async () => {
|
||||
// setup a build already in place
|
||||
h.createDummyBuild(prNum, SHA, isPublic);
|
||||
// distinguish this build from the downloaded one
|
||||
h.writeBuildFile(prNum, SHA, 'index.html', 'My content', isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it(`should not overwrite existing builds (even if the SHA is different)`, async () => {
|
||||
// Since only the first few characters of the SHA are used, it is possible for two different
|
||||
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
|
||||
// overwrite the first.
|
||||
expect(SIMILAR_SHA).not.toEqual(SHA);
|
||||
expect(computeShortSha(SIMILAR_SHA)).toEqual(computeShortSha(SHA));
|
||||
h.createDummyBuild(prNum, SIMILAR_SHA, isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toContain('index.html');
|
||||
h.writeBuildFile(prNum, SIMILAR_SHA, 'index.html', 'My content', isPublic);
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, overwriteRe));
|
||||
expect(h.readBuildFile(prNum, SIMILAR_SHA, 'index.html', isPublic)).toBe('My content');
|
||||
expect({ prNum, isPublic, sha: SIMILAR_SHA }).toExistAsABuild();
|
||||
expect({ prNum, sha: SIMILAR_SHA }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
it('should only delete the SHA directory on error (for existing PR)', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
await curl(payload(BuildNums.TRUST_CHECK_ERROR)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ERROR }).toExistAsAnArtifact();
|
||||
expect({ prNum, isPublic, sha: SHA }).not.toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
describe('when the PR\'s visibility has changed', () => {
|
||||
|
||||
it('should update the PR\'s visibility', async () => {
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(statusCode));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic, sha: ALT_SHA }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds (but keep the updated visibility)', async () => {
|
||||
h.createDummyBuild(prNum, SHA, !isPublic);
|
||||
await curl(payload(build)).then(h.verifyResponse(409));
|
||||
expect({ prNum, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, isPublic: !isPublic }).not.toExistAsABuild();
|
||||
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
|
||||
|
||||
it('should reject the request if it fails to update the PR\'s visibility', async () => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(prNum, ALT_SHA, isPublic);
|
||||
h.createDummyBuild(prNum, ALT_SHA, !isPublic);
|
||||
|
||||
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(prNum, !isPublic)}' ` +
|
||||
`to existing directory '${h.getPrDir(prNum, isPublic)}'.`);
|
||||
|
||||
await curl(payload(build)).then(h.verifyResponse(409, errorRegex));
|
||||
|
||||
expect({ prNum, isPublic }).not.toExistAsABuild();
|
||||
|
||||
// The bad folders should have been deleted
|
||||
expect({ prNum, sha: ALT_SHA, isPublic }).toExistAsABuild();
|
||||
expect({ prNum, sha: ALT_SHA, isPublic: !isPublic }).toExistAsABuild();
|
||||
|
||||
// since it errored we didn't clear up the downloaded artifact - perhaps we should?
|
||||
expect({ prNum }).toExistAsAnArtifact();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL ${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', async () => {
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL ${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const curl = makeCurl(`${host}/pr-updated`);
|
||||
|
||||
it('should disallow non-POST requests', async () => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({method: 'GET'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PUT'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'PATCH'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
curl({method: 'DELETE'}).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a payload', async () => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: '' }).then(h.verifyResponse(400, bodyRegex)),
|
||||
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a \'number\' field', async () => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
curl({ data: { number: null} }).then(h.verifyResponse(400, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests for which checking the PR visibility fails', async () => {
|
||||
await curl({ data: { number: PrNums.TRUST_CHECK_ERROR } }).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', async () => {
|
||||
const mockPayload = JSON.stringify({number: 1}); // MockExternalApiFlags.TRUST_CHECK_ACTIVE_TRUSTED_USER });
|
||||
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" ${host}`;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if PR\'s visibility is already up-to-date', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
const checkVisibilities = (remove: boolean) => {
|
||||
// Public build is already public.
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild(remove);
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild(remove);
|
||||
// Hidden build is already hidden.
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild(remove);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild(remove);
|
||||
};
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, true);
|
||||
h.createDummyBuild(hiddenPr, SHA, false);
|
||||
checkVisibilities(false);
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||
checkVisibilities(true);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if \'action\' implies no visibility change', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
const checkVisibilities = (remove: boolean) => {
|
||||
// Public build is hidden atm.
|
||||
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(remove);
|
||||
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(remove);
|
||||
// Hidden build is public atm.
|
||||
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(remove);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(remove);
|
||||
};
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
checkVisibilities(false);
|
||||
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'foo' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||
checkVisibilities(true);
|
||||
});
|
||||
|
||||
|
||||
describe('when the visibility has changed', () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create initial PR builds with opposite visibilities as the ones that will be reported:
|
||||
// - The now public PR was previously hidden.
|
||||
// - The now hidden PR was previously public.
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
|
||||
expect({ prNum: publicPr, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, isPublic: true }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, isPublic: true }).toExistAsABuild(false);
|
||||
});
|
||||
afterEach(() => {
|
||||
// Expect PRs' visibility to have been updated:
|
||||
// - The public PR should be actually public (previously it was hidden).
|
||||
// - The hidden PR should be actually hidden (previously it was public).
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: undefined)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: labeled)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'labeled' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'labeled' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should update the PR\'s visibility (action: unlabeled)', async () => {
|
||||
await Promise.all([
|
||||
curl({ data: {number: +publicPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
|
||||
curl({ data: {number: +hiddenPr, action: 'unlabeled' } }).then(h.verifyResponse(200)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/*`, () => {
|
||||
|
||||
it('should respond with 404 for requests to unknown URLs', async () => {
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
await Promise.all([
|
||||
h.runCmd(`curl -iL ${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iL ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PUT ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX POST ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
});
|
@ -1,80 +1,101 @@
|
||||
// Imports
|
||||
import {AIO_NGINX_HOSTNAME} from '../common/env-variables';
|
||||
import {computeShortSha} from '../common/utils';
|
||||
import {ALT_SHA, BuildNums, PrNums, SHA} from './constants';
|
||||
import {helper as h, makeCurl, payload} from './helper';
|
||||
import {customMatchers} from './jasmine-custom-matchers';
|
||||
import * as path from 'path';
|
||||
import * as c from './constants';
|
||||
import {helper as h} from './helper';
|
||||
|
||||
// Tests
|
||||
h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
|
||||
const hostname = AIO_NGINX_HOSTNAME;
|
||||
const hostname = h.nginxHostname;
|
||||
const host = `${hostname}:${port}`;
|
||||
const curlPrUpdated = makeCurl(`${scheme}://${host}/pr-updated`);
|
||||
const pr9 = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
|
||||
const getFile = (pr: number, sha: string, file: string) =>
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${computeShortSha(sha)}.${host}/${file}`);
|
||||
const prUpdated = (prNum: number, action?: string) => curlPrUpdated({ data: { number: prNum, action } });
|
||||
const circleBuild = makeCurl(`${scheme}://${host}/circle-build`);
|
||||
const getFile = (pr: string, sha: string, file: string) =>
|
||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${h.getShordSha(sha)}.${host}/${file}`);
|
||||
const uploadBuild = (pr: string, sha: string, archive: string, authHeader = 'Token FOO') => {
|
||||
const curlPost = `curl -iLX POST --header "Authorization: ${authHeader}"`;
|
||||
return h.runCmd(`${curlPost} --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
|
||||
};
|
||||
const prUpdated = (pr: number, action?: string) => {
|
||||
const url = `${scheme}://${host}/pr-updated`;
|
||||
const payloadStr = JSON.stringify({number: pr, action});
|
||||
return h.runCmd(`curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`);
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;
|
||||
jasmine.addMatchers(customMatchers);
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => {
|
||||
h.deletePrDir(pr9);
|
||||
h.deletePrDir(pr9, false);
|
||||
h.cleanUp();
|
||||
});
|
||||
afterEach(() => h.cleanUp());
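// --- Editor's sketch (not part of the original diff): how a custom matcher such as `toExistAsABuild`
// can be wired up via `jasmine.addMatchers`. The real matchers live in './jasmine-custom-matchers'
// (imported above); the matcher below is a generic, simplified illustration only. ---
const exampleCustomMatchers: jasmine.CustomMatcherFactories = {
  toBePositive: () => ({
    compare: (actual: number) => {
      const pass = actual > 0;
      return {pass, message: `Expected ${actual}${pass ? ' not' : ''} to be positive.`};
    },
  }),
};
// Registered per spec (as with `customMatchers` in the `beforeEach` above):
//   beforeEach(() => jasmine.addMatchers(exampleCustomMatchers));
//   expect(42).toBePositive();
// --- End of editor's sketch. ---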
|
||||
|
||||
|
||||
describe('for a new/non-existing PR', () => {
|
||||
|
||||
it('should be able to create and serve a public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
it('should be able to upload and serve a public build', done => {
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
|
||||
const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
|
||||
await Promise.all([
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR }).toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to create but not serve a hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
it('should be able to upload but not serve a hidden build', done => {
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
|
||||
await Promise.all([
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
]);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
])).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||
expect(h.buildExists(pr9, sha9, false)).toBe(true);
|
||||
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject if verification fails', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ERROR;
|
||||
const PR = PrNums.TRUST_CHECK_ERROR;
|
||||
it('should reject an upload if verification fails', done => {
|
||||
const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
|
||||
then(h.verifyResponse(403, errorRegex9)).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr9)).toBe(false);
|
||||
expect(h.buildExists(pr9, '', false)).toBe(false);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to notify that a PR has been updated (and do nothing)', async () => {
|
||||
await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(200));
|
||||
// The PR should still not exist.
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).not.toExistAsABuild();
|
||||
it('should be able to notify that a PR has been updated (and do nothing)', done => {
|
||||
prUpdated(+pr9).
|
||||
then(h.verifyResponse(200)).
|
||||
then(() => {
|
||||
// The PR should still not exist.
|
||||
expect(h.buildExists(pr9, '', false)).toBe(false);
|
||||
expect(h.buildExists(pr9, '', true)).toBe(false);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
@ -82,186 +103,215 @@ h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme
|
||||
|
||||
describe('for an existing PR', () => {
|
||||
|
||||
it('should be able to create and serve a public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
it('should be able to upload and serve a public build', done => {
|
||||
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
||||
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
||||
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix1 = `^PR: ${PR} \\| SHA: ${ALT_SHA} \\| File:`;
|
||||
const idxContentRegex1 = new RegExp(`${regexPrefix1} \\/index\\.html$`);
|
||||
const barContentRegex1 = new RegExp(`${regexPrefix1} \\/foo\\/bar\\.js$`);
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix2 = `^BUILD: ${BUILD} \\| PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex2 = new RegExp(`${regexPrefix2} \\/index\\.html$`);
|
||||
const barContentRegex2 = new RegExp(`${regexPrefix2} \\/foo\\/bar\\.js$`);
|
||||
h.createDummyBuild(pr9, sha0);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA);
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(201));
|
||||
await Promise.all([
|
||||
getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex1)),
|
||||
getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex1)),
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex2)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex2)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR, sha: SHA }).toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA }).toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
|
||||
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to create but not serve a hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
it('should be able to upload but not serve a hidden build', done => {
|
||||
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
||||
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
||||
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA, false);
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(202));
|
||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
await Promise.all([
|
||||
getFile(PR, ALT_SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, ALT_SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(PR, SHA, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
]);
|
||||
h.createDummyBuild(pr9, sha0, false);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
expect({ prNum: PR, sha: SHA }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: SHA, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||
])).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||
expect(h.buildExists(pr9, sha9, false)).toBe(true);
|
||||
expect(h.readBuildFile(pr9, sha0, 'index.html', false)).toMatch(idxContentRegex0);
|
||||
expect(h.readBuildFile(pr9, sha0, 'foo/bar.js', false)).toMatch(barContentRegex0);
|
||||
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject if verification fails', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ERROR;
|
||||
const PR = PrNums.TRUST_CHECK_ERROR;
|
||||
it('should reject an upload if verification fails', done => {
|
||||
const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
|
||||
|
||||
h.createDummyBuild(PR, ALT_SHA, false);
|
||||
h.createDummyBuild(pr9, sha0);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(500));
|
||||
uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
|
||||
then(h.verifyResponse(403, errorRegex9)).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr9)).toBe(true);
|
||||
expect(h.buildExists(pr9, sha0)).toBe(true);
|
||||
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||
}).
|
||||
then(done);
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: PR, sha: ALT_SHA, isPublic: false }).toExistAsABuild();
|
||||
});
|
||||
|
||||
|
||||
it('should not be able to overwrite an existing public preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const PR = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
it('should not be able to overwrite an existing public build', done => {
|
||||
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
const regexPrefix = `^PR: ${PR} \\| SHA: ${SHA} \\| File:`;
|
||||
const idxContentRegex = new RegExp(`${regexPrefix} \\/index\\.html$`);
|
||||
const barContentRegex = new RegExp(`${regexPrefix} \\/foo\\/bar\\.js$`);
|
||||
h.createDummyBuild(pr9, sha9);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
h.createDummyBuild(PR, SHA);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
|
||||
await Promise.all([
|
||||
getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'index.html').then(h.verifyResponse(200, idxContentRegex)),
|
||||
getFile(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex)),
|
||||
]);
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR }).toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath).
|
||||
then(h.verifyResponse(409)).
|
||||
then(() => Promise.all([
|
||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||
])).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not be able to overwrite an existing hidden preview', async () => {
|
||||
const BUILD = BuildNums.TRUST_CHECK_UNTRUSTED;
|
||||
const PR = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
h.createDummyBuild(PR, SHA, false);
|
||||
it('should not be able to overwrite an existing hidden build', done => {
|
||||
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||
|
||||
await circleBuild(payload(BUILD)).then(h.verifyResponse(409));
|
||||
h.createDummyBuild(pr9, sha9, false);
|
||||
h.createDummyArchive(pr9, sha9, archivePath);
|
||||
|
||||
expect({ prNum: PR }).toExistAsAnArtifact();
|
||||
expect({ prNum: PR, isPublic: false }).toExistAsABuild();
|
||||
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||
then(h.verifyResponse(409)).
|
||||
then(() => {
|
||||
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to request re-checking visibility (if outdated)', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
it('should be able to request re-checking visibility (if outdated)', done => {
|
||||
const publicPr = pr9;
|
||||
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, false);
|
||||
h.createDummyBuild(hiddenPr, SHA, true);
|
||||
h.createDummyBuild(publicPr, sha9, false);
|
||||
h.createDummyBuild(hiddenPr, sha9, true);
|
||||
|
||||
// PR visibilities are outdated (i.e. the opposite of what they should be).
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
|
||||
expect(h.buildExists(publicPr, '', false)).toBe(true);
|
||||
expect(h.buildExists(publicPr, '', true)).toBe(false);
|
||||
expect(h.buildExists(hiddenPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(hiddenPr, '', true)).toBe(true);
|
||||
|
||||
await Promise.all([
|
||||
prUpdated(publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(hiddenPr).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// PR visibilities should have been updated.
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
Promise.
|
||||
all([
|
||||
prUpdated(+publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(+hiddenPr).then(h.verifyResponse(200)),
|
||||
]).
|
||||
then(() => {
|
||||
// PR visibilities should have been updated.
|
||||
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||
}).
|
||||
then(() => {
|
||||
h.deletePrDir(publicPr, true);
|
||||
h.deletePrDir(hiddenPr, false);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should be able to request re-checking visibility (if up-to-date)', async () => {
|
||||
const publicPr = PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER;
|
||||
const hiddenPr = PrNums.TRUST_CHECK_UNTRUSTED;
|
||||
it('should be able to request re-checking visibility (if up-to-date)', done => {
|
||||
const publicPr = pr9;
|
||||
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||
|
||||
h.createDummyBuild(publicPr, SHA, true);
|
||||
h.createDummyBuild(hiddenPr, SHA, false);
|
||||
h.createDummyBuild(publicPr, sha9, true);
|
||||
h.createDummyBuild(hiddenPr, sha9, false);
|
||||
|
||||
// PR visibilities are already up-to-date.
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: publicPr, sha: SHA, isPublic: true }).toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: hiddenPr, sha: SHA, isPublic: true }).not.toExistAsABuild(false);
|
||||
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||
|
||||
await Promise.all([
|
||||
prUpdated(publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(hiddenPr).then(h.verifyResponse(200)),
|
||||
]);
|
||||
|
||||
// PR visibilities are still up-to-date.
|
||||
expect({ prNum: publicPr, isPublic: true }).toExistAsABuild();
|
||||
expect({ prNum: publicPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: true }).not.toExistAsABuild();
|
||||
expect({ prNum: hiddenPr, isPublic: false }).toExistAsABuild();
|
||||
Promise.
|
||||
all([
|
||||
prUpdated(+publicPr).then(h.verifyResponse(200)),
|
||||
prUpdated(+hiddenPr).then(h.verifyResponse(200)),
|
||||
]).
|
||||
then(() => {
|
||||
// PR visibilities are still up-to-date.
|
||||
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject a request if re-checking visibility fails', async () => {
|
||||
const errorPr = PrNums.TRUST_CHECK_ERROR;
|
||||
it('should reject a request if re-checking visibility fails', done => {
|
||||
const errorPr = String(c.BV_getPrIsTrusted_error);
|
||||
|
||||
h.createDummyBuild(errorPr, SHA, true);
|
||||
h.createDummyBuild(errorPr, sha9, true);
|
||||
|
||||
expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild(false);
|
||||
expect({ prNum: errorPr, isPublic: true }).toExistAsABuild(false);
|
||||
expect(h.buildExists(errorPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(errorPr, '', true)).toBe(true);
|
||||
|
||||
await prUpdated(errorPr).then(h.verifyResponse(500, /TRUST_CHECK_ERROR/));
|
||||
|
||||
// PR visibility should not have been updated.
|
||||
expect({ prNum: errorPr, isPublic: false }).not.toExistAsABuild();
|
||||
expect({ prNum: errorPr, isPublic: true }).toExistAsABuild();
|
||||
prUpdated(+errorPr).
|
||||
then(h.verifyResponse(500, /Test/)).
|
||||
then(() => {
|
||||
// PR visibility should not have been updated.
|
||||
expect(h.buildExists(errorPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(errorPr, '', true)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject a request if updating visibility fails', async () => {
|
||||
it('should reject a request if updating visibility fails', done => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, false);
|
||||
h.createDummyBuild(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, SHA, true);
|
||||
h.createDummyBuild(pr9, sha9, false);
|
||||
h.createDummyBuild(pr9, sha9, true);
|
||||
|
||||
const hiddenPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, false);
|
||||
const publicPrDir = h.getPrDir(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, true);
|
||||
const hiddenPrDir = h.getPrDir(pr9, false);
|
||||
const publicPrDir = h.getPrDir(pr9, true);
|
||||
const bodyRegex = new RegExp(`Request to move '${hiddenPrDir}' to existing directory '${publicPrDir}'`);
|
||||
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild(false);
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild(false);
|
||||
expect(h.buildExists(pr9, '', false)).toBe(true);
|
||||
expect(h.buildExists(pr9, '', true)).toBe(true);
|
||||
|
||||
await prUpdated(PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER).then(h.verifyResponse(409, bodyRegex));
|
||||
|
||||
// PR visibility should not have been updated.
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: false }).toExistAsABuild();
|
||||
expect({ prNum: PrNums.TRUST_CHECK_ACTIVE_TRUSTED_USER, isPublic: true }).toExistAsABuild();
|
||||
prUpdated(+pr9).
|
||||
then(h.verifyResponse(409, bodyRegex)).
|
||||
then(() => {
|
||||
// PR visibility should not have been updated.
|
||||
expect(h.buildExists(pr9, '', false)).toBe(true);
|
||||
expect(h.buildExists(pr9, '', true)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
@ -1,2 +0,0 @@
import '../preview-server';
import './mock-external-apis';
@ -0,0 +1,38 @@
// Imports
import {GithubPullRequests} from '../common/github-pull-requests';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../upload-server/build-verifier';
import {UploadError} from '../upload-server/upload-error';
import * as c from './constants';

// Run
// TODO(gkalpak): Add e2e tests to cover these interactions as well.
GithubPullRequests.prototype.addComment = () => Promise.resolve();
BuildVerifier.prototype.getPrIsTrusted = (pr: number) => {
  switch (pr) {
    case c.BV_getPrIsTrusted_error:
      // For e2e tests, fake an error.
      return Promise.reject('Test');
    case c.BV_getPrIsTrusted_notTrusted:
      // For e2e tests, fake an untrusted PR (`false`).
      return Promise.resolve(false);
    default:
      // For e2e tests, default to trusted PRs (`true`).
      return Promise.resolve(true);
  }
};
BuildVerifier.prototype.verify = (expectedPr: number, authHeader: string) => {
  switch (authHeader) {
    case c.BV_verify_error:
      // For e2e tests, fake a verification error.
      return Promise.reject(new UploadError(403, `Error while verifying upload for PR ${expectedPr}: Test`));
    case c.BV_verify_verifiedNotTrusted:
      // For e2e tests, fake a `verifiedNotTrusted` verification status.
      return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted);
    default:
      // For e2e tests, default to `verifiedAndTrusted` verification status.
      return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);
  }
};

// tslint:disable-next-line: no-var-requires
require('../upload-server/index');
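// --- Editor's sketch (not part of the original diff): the mocking technique used above is plain
// prototype patching - overriding a method on the prototype before the server is loaded affects
// every instance created afterwards. Minimal, self-contained illustration with a hypothetical class: ---
class ExampleVerifier {
  getPrIsTrusted(pr: number): Promise<boolean> {
    return Promise.resolve(true);  // "real" behaviour
  }
}
// Patch the prototype once, up front (as mock-external-apis does for `BuildVerifier`)...
ExampleVerifier.prototype.getPrIsTrusted = (pr: number) =>
    Promise.resolve(pr % 2 === 0);  // fake behaviour driven by the input, like the `c.BV_*` constants
// ...and every instance created later (e.g. inside the required server) sees the fake:
new ExampleVerifier().getPrIsTrusted(7).then(isTrusted => console.log(isTrusted));  // false
// --- End of editor's sketch. ---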
@ -1,30 +0,0 @@
declare module 'tar-stream' {

  import {Readable, Writable} from 'stream';

  export interface Pack extends Readable {
    entry(header: Header, callback?: (err?: any) => {}): Writable;
    entry(header: Header, contents: string, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: Buffer, callback?: (err?: any) => {}): Writable;
    entry(header: Header, buffer: string|Buffer, callback?: (err?: any) => {}): Writable;
    finalize();
    destroy(err: any);
  }

  export interface Header {
    name: string;
    mode?: number;
    uid?: number;
    gid?: number;
    size?: number;
    mtime?: Date;
    type?: type;
    linkname?: string;
    uname?: string;
    gname?: string;
    devmajor?: number;
    devminor?: number;
  }

  export function pack(): Pack;
}
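// --- Editor's sketch (not part of the original diff): how the `tar-stream` typings above are
// typically consumed - build a tarball in memory, gzip it and write it to disk. The calls used
// (`pack()`, `entry()`, `finalize()`, `pipe()`) are the ones declared above; the output path is
// arbitrary and purely illustrative. ---
import {createWriteStream} from 'fs';
import {createGzip} from 'zlib';
import {pack as createTarPack} from 'tar-stream';

const tarPack = createTarPack();
tarPack.entry({name: 'index.html'}, '<h1>Hello</h1>');      // add a file entry with inline contents
tarPack.entry({name: 'foo/bar.js'}, 'console.log("bar");'); // nested paths are just entry names
tarPack.finalize();                                         // no more entries will be added

tarPack.pipe(createGzip()).pipe(createWriteStream('/tmp/snapshot.tar.gz'));
// --- End of editor's sketch. ---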
@ -0,0 +1,571 @@
|
||||
// Imports
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as c from './constants';
|
||||
import {CmdResult, helper as h} from './helper';
|
||||
|
||||
// Tests
|
||||
describe('upload-server (on HTTP)', () => {
|
||||
const hostname = h.uploadHostname;
|
||||
const port = h.uploadPort;
|
||||
const host = `${hostname}:${port}`;
|
||||
const pr = '9';
|
||||
const sha9 = '9'.repeat(40);
|
||||
const sha0 = '0'.repeat(40);
|
||||
|
||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||
afterEach(() => h.cleanUp());
|
||||
|
||||
|
||||
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
||||
const authorizationHeader = `--header "Authorization: Token FOO"`;
|
||||
const xFileHeader = `--header "X-File: ${h.buildsDir}/snapshot.tar.gz"`;
|
||||
const defaultHeaders = `${authorizationHeader} ${xFileHeader}`;
|
||||
const curl = (url: string, headers = defaultHeaders) => `curl -iL ${headers} ${url}`;
|
||||
|
||||
|
||||
it('should disallow non-GET requests', done => {
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Unknown resource/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests without an \'AUTHORIZATION\' header', done => {
|
||||
const headers1 = '';
|
||||
const headers2 = '--header "AUTHORIXATION: "';
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Missing or empty 'AUTHORIZATION' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(curl(url, headers1)).then(h.verifyResponse(401, bodyRegex)),
|
||||
h.runCmd(curl(url, headers2)).then(h.verifyResponse(401, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests without an \'X-FILE\' header', done => {
|
||||
const headers1 = authorizationHeader;
|
||||
const headers2 = `${authorizationHeader} --header "X-FILE: "`;
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = /^Missing or empty 'X-FILE' header/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(curl(url, headers1)).then(h.verifyResponse(400, bodyRegex)),
|
||||
h.runCmd(curl(url, headers2)).then(h.verifyResponse(400, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests for which the PR verification fails', done => {
|
||||
const headers = `--header "Authorization: ${c.BV_verify_error}" ${xFileHeader}`;
|
||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||
const bodyRegex = new RegExp(`Error while verifying upload for PR ${pr}: Test`);
|
||||
|
||||
h.runCmd(curl(url, headers)).
|
||||
then(h.verifyResponse(403, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const cmdPrefix = curl(`http://${host}`);
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject PRs with leading zeros', done => {
|
||||
h.runCmd(curl(`http://${host}/create-build/0${pr}/${sha9}`)).
|
||||
then(h.verifyResponse(404)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||
Promise.all([
|
||||
h.runCmd(curl(`http://${host}/create-build/${pr}/0${sha9}`)).then(h.verifyResponse(404)),
|
||||
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha9}`)).then(h.verifyResponse(500)),
|
||||
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha0}`)).then(h.verifyResponse(500)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
[true, false].forEach(isPublic => describe(`(for ${isPublic ? 'public' : 'hidden'} builds)`, () => {
|
||||
const authorizationHeader2 = isPublic ?
|
||||
authorizationHeader : `--header "Authorization: ${c.BV_verify_verifiedNotTrusted}"`;
|
||||
const cmdPrefix = curl('', `${authorizationHeader2} ${xFileHeader}`);
|
||||
const overwriteRe = RegExp(`^Request to overwrite existing ${isPublic ? 'public' : 'non-public'} directory`);
|
||||
|
||||
|
||||
it('should not overwrite existing builds', done => {
|
||||
h.createDummyBuild(pr, sha9, isPublic);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||
|
||||
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
|
||||
|
||||
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(409, overwriteRe)).
|
||||
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds (even if the SHA is different)', done => {
|
||||
// Since only the first few characters of the SHA are used, it is possible for two different
|
||||
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
|
||||
// overwrite the first.
|
||||
|
||||
const sha9Almost = sha9.replace(/.$/, '8');
|
||||
expect(sha9Almost).not.toBe(sha9);
|
||||
|
||||
h.createDummyBuild(pr, sha9, isPublic);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||
|
||||
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
|
||||
|
||||
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9Almost}`).
|
||||
then(h.verifyResponse(409, overwriteRe)).
|
||||
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should delete the PR directory on error (for new PR)', done => {
|
||||
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(500)).
|
||||
then(() => expect(h.buildExists(pr, '', isPublic)).toBe(false)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should only delete the SHA directory on error (for existing PR)', done => {
|
||||
h.createDummyBuild(pr, sha0, isPublic);
|
||||
|
||||
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(500)).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
|
||||
expect(h.buildExists(pr, '', isPublic)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
describe('on successful upload', () => {
|
||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
const statusCode = isPublic ? 201 : 202;
|
||||
let uploadPromise: Promise<CmdResult>;
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyArchive(pr, sha9, archivePath);
|
||||
uploadPromise = h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`);
|
||||
});
|
||||
afterEach(() => h.deletePrDir(pr, isPublic));
|
||||
|
||||
|
||||
it(`should respond with ${statusCode}`, done => {
|
||||
uploadPromise.then(h.verifyResponse(statusCode)).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should extract the contents of the uploaded file', done => {
|
||||
uploadPromise.
|
||||
then(() => {
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
|
||||
expect(h.readBuildFile(pr, sha9, 'foo/bar.js', isPublic)).toContain(`uploaded/${pr}`);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it(`should create files/directories owned by '${h.wwwUser}'`, done => {
|
||||
const prDir = h.getPrDir(pr, isPublic);
|
||||
const shaDir = h.getShaDir(prDir, sha9);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
uploadPromise.
|
||||
then(() => Promise.all([
|
||||
h.runCmd(`find ${shaDir}`),
|
||||
h.runCmd(`find ${shaDir} -user ${h.wwwUser}`),
|
||||
])).
|
||||
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
|
||||
expect(userFiles).toBe(allFiles);
|
||||
expect(userFiles).toContain(shaDir);
|
||||
expect(userFiles).toContain(idxPath);
|
||||
expect(userFiles).toContain(barPath);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should delete the uploaded file', done => {
|
||||
expect(fs.existsSync(archivePath)).toBe(true);
|
||||
uploadPromise.
|
||||
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should make the build directory non-writable', done => {
|
||||
const prDir = h.getPrDir(pr, isPublic);
|
||||
const shaDir = h.getShaDir(prDir, sha9);
|
||||
const idxPath = path.join(shaDir, 'index.html');
|
||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||
|
||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||
const isNotWritable = (fileOrDir: string) => {
|
||||
const mode = fs.statSync(fileOrDir).mode;
|
||||
// tslint:disable-next-line: no-bitwise
|
||||
return !(mode & parseInt('222', 8));
|
||||
};
|
||||
|
||||
uploadPromise.
|
||||
then(() => {
|
||||
expect(isNotWritable(shaDir)).toBe(true);
|
||||
expect(isNotWritable(idxPath)).toBe(true);
|
||||
expect(isNotWritable(barPath)).toBe(true);
|
||||
}).
|
||||
then(done);
|
||||
});
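// --- Editor's sketch (not part of the original diff): what the `mode & parseInt('222', 8)` check
// in the test above does. 0o222 masks the three write bits (owner/group/other), so the expression
// is non-zero iff any write bit is set. Uses the `fs` import already at the top of this spec; the
// sample path below is hypothetical. ---
const WRITE_BITS_MASK = 0o222;  // -w--w--w-
const isWritableByAnyone = (fileOrDir: string) =>
    // tslint:disable-next-line: no-bitwise
    (fs.statSync(fileOrDir).mode & WRITE_BITS_MASK) !== 0;
// A freshly extracted, locked-down build directory should report `false`:
//   isWritableByAnyone('/tmp/some-build-dir');
// --- End of editor's sketch. ---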
|
||||
|
||||
|
||||
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)', done => {
|
||||
// It is possible that 40-chars long build directories exist, if they had been deployed
|
||||
// before implementing the shorter build directory names. In that case, we don't want the
|
||||
// second (shorter) name to be considered the same as the old one (even if they originate
|
||||
// from the same SHA).
|
||||
|
||||
h.createDummyBuild(pr, sha9, isPublic, false, true);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toContain('index.html');
|
||||
|
||||
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic, true);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
|
||||
|
||||
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||
then(h.verifyResponse(statusCode)).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
|
||||
expect(h.buildExists(pr, sha9, isPublic, true)).toBe(true);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe('when the PR\'s visibility has changed', () => {
|
||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||
const statusCode = isPublic ? 201 : 202;
|
||||
|
||||
const checkPrVisibility = (isPublic2: boolean) => {
|
||||
expect(h.buildExists(pr, '', isPublic2)).toBe(true);
|
||||
expect(h.buildExists(pr, '', !isPublic2)).toBe(false);
|
||||
expect(h.buildExists(pr, sha0, isPublic2)).toBe(true);
|
||||
expect(h.buildExists(pr, sha0, !isPublic2)).toBe(false);
|
||||
};
|
||||
const uploadBuild = (sha: string) => h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha}`);
|
||||
|
||||
beforeEach(() => {
|
||||
h.createDummyBuild(pr, sha0, !isPublic);
|
||||
h.createDummyArchive(pr, sha9, archivePath);
|
||||
checkPrVisibility(!isPublic);
|
||||
});
|
||||
afterEach(() => h.deletePrDir(pr, isPublic));
|
||||
|
||||
|
||||
it('should update the PR\'s visibility', done => {
|
||||
uploadBuild(sha9).
|
||||
then(h.verifyResponse(statusCode)).
|
||||
then(() => {
|
||||
checkPrVisibility(isPublic);
|
||||
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
|
||||
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(sha9);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should not overwrite existing builds (but keep the updated visibility)', done => {
|
||||
expect(h.buildExists(pr, sha0, isPublic)).toBe(false);
|
||||
|
||||
uploadBuild(sha0).
|
||||
then(h.verifyResponse(409, overwriteRe)).
|
||||
then(() => {
|
||||
checkPrVisibility(isPublic);
|
||||
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(pr);
|
||||
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(`uploaded/${pr}`);
|
||||
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(sha0);
|
||||
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(sha9);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject the request if it fails to update the PR\'s visibility', done => {
|
||||
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||
h.createDummyBuild(pr, sha0, isPublic);
|
||||
|
||||
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
|
||||
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
|
||||
|
||||
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(pr, !isPublic)}' ` +
|
||||
`to existing directory '${h.getPrDir(pr, isPublic)}'.`);
|
||||
|
||||
uploadBuild(sha9).
|
||||
then(h.verifyResponse(409, errorRegex)).
|
||||
then(() => {
|
||||
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
|
||||
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
|
||||
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
|
||||
expect(h.buildExists(pr, sha9, !isPublic)).toBe(false);
|
||||
}).
|
||||
then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/health-check`, () => {
|
||||
|
||||
it('should respond with 200', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL http://${host}/health-check`).then(h.verifyResponse(200)),
|
||||
h.runCmd(`curl -iL http://${host}/health-check/`).then(h.verifyResponse(200)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 if the path does not match exactly', done => {
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iL http://${host}/health-check/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/health-check-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/health-checknfoo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foo/health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foo-health-check`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`curl -iL http://${host}/foonhealth-check`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe(`${host}/pr-updated`, () => {
|
||||
const url = `http://${host}/pr-updated`;
|
||||
|
||||
// Helpers
|
||||
const curl = (payload?: {number: number, action?: string}) => {
|
||||
const payloadStr = payload && JSON.stringify(payload) || '';
|
||||
return `curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`;
|
||||
};
|
||||
|
||||
|
||||
it('should disallow non-POST requests', done => {
|
||||
const bodyRegex = /^Unknown resource in request/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a payload', done => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
h.runCmd(curl()).
|
||||
then(h.verifyResponse(400, bodyRegex)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 400 for requests without a \'number\' field', done => {
|
||||
const bodyRegex = /^Missing or empty 'number' field in request/;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(curl({} as any)).then(h.verifyResponse(400, bodyRegex)),
|
||||
h.runCmd(curl({number: null} as any)).then(h.verifyResponse(400, bodyRegex)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should reject requests for which checking the PR visibility fails', done => {
|
||||
h.runCmd(curl({number: c.BV_getPrIsTrusted_error})).
|
||||
then(h.verifyResponse(500, /Test/)).
|
||||
then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should respond with 404 for unknown paths', done => {
|
||||
const mockPayload = JSON.stringify({number: +pr});
|
||||
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" http://${host}`;
|
||||
|
||||
Promise.all([
|
||||
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||
]).then(done);
|
||||
});
|
||||
|
||||
|
||||
it('should do nothing if PR\'s visibility is already up-to-date', done => {
|
||||
const publicPr = pr;
|
||||
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||
const checkVisibilities = () => {
|
||||
// Public build is already public.
|
||||
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||
// Hidden build is already hidden.
|
||||
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||
};
|
||||
|
||||
h.createDummyBuild(publicPr, sha9, true);
|
||||
h.createDummyBuild(hiddenPr, sha9, false);
|
||||
checkVisibilities();
|
||||
|
||||
Promise.
|
||||
all([
|
||||
h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||
h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||
]).
|
||||
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||
then(checkVisibilities).
|
||||
then(done);
|
||||
});

it('should do nothing if \'action\' implies no visibility change', done => {
  const publicPr = pr;
  const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
  const checkVisibilities = () => {
    // Public build is hidden atm.
    expect(h.buildExists(publicPr, '', false)).toBe(true);
    expect(h.buildExists(publicPr, '', true)).toBe(false);
    // Hidden build is public atm.
    expect(h.buildExists(hiddenPr, '', false)).toBe(false);
    expect(h.buildExists(hiddenPr, '', true)).toBe(true);
  };

  h.createDummyBuild(publicPr, sha9, false);
  h.createDummyBuild(hiddenPr, sha9, true);
  checkVisibilities();

  Promise.
    all([
      h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
      h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
    ]).
    // Visibilities should not have changed, because the specified action could not have triggered a change.
    then(checkVisibilities).
    then(done);
});
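
// h.buildExists(), h.createDummyBuild() and h.deletePrDir() are also part of the shared
// helpers. Conceptually, buildExists() checks whether a build directory for the given PR (and
// optionally SHA) is present in either the public or the hidden builds location. The sketch
// below is purely illustrative: the directory root and the naming scheme for hidden builds are
// placeholder assumptions, not the real convention.
const buildExistsSketch = (prNumber: string, sha = '', isPublic = true): boolean => {
  const fs = require('fs');
  const path = require('path');
  const buildsDir = '/tmp/aio-builds';                         // placeholder root (assumption)
  const prDir = isPublic ? prNumber : `hidden-${prNumber}`;    // placeholder naming (assumption)
  return fs.existsSync(path.join(buildsDir, prDir, sha));
};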

describe('when the visibility has changed', () => {
  const publicPr = pr;
  const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);

  beforeEach(() => {
    // Create initial PR builds with visibilities opposite to the ones that will be reported:
    // - The now-public PR was previously hidden.
    // - The now-hidden PR was previously public.
    h.createDummyBuild(publicPr, sha9, false);
    h.createDummyBuild(hiddenPr, sha9, true);

    expect(h.buildExists(publicPr, '', false)).toBe(true);
    expect(h.buildExists(publicPr, '', true)).toBe(false);
    expect(h.buildExists(hiddenPr, '', false)).toBe(false);
    expect(h.buildExists(hiddenPr, '', true)).toBe(true);
  });

  afterEach(() => {
    // Expect the PRs' visibility to have been updated:
    // - The public PR should now actually be public (previously it was hidden).
    // - The hidden PR should now actually be hidden (previously it was public).
    expect(h.buildExists(publicPr, '', false)).toBe(false);
    expect(h.buildExists(publicPr, '', true)).toBe(true);
    expect(h.buildExists(hiddenPr, '', false)).toBe(true);
    expect(h.buildExists(hiddenPr, '', true)).toBe(false);

    h.deletePrDir(publicPr, true);
    h.deletePrDir(hiddenPr, false);
  });

  it('should update the PR\'s visibility (action: undefined)', done => {
    Promise.all([
      h.runCmd(curl({number: +publicPr})).then(h.verifyResponse(200)),
      h.runCmd(curl({number: +hiddenPr})).then(h.verifyResponse(200)),
    ]).then(done);
  });

  it('should update the PR\'s visibility (action: labeled)', done => {
    Promise.all([
      h.runCmd(curl({number: +publicPr, action: 'labeled'})).then(h.verifyResponse(200)),
      h.runCmd(curl({number: +hiddenPr, action: 'labeled'})).then(h.verifyResponse(200)),
    ]).then(done);
  });

  it('should update the PR\'s visibility (action: unlabeled)', done => {
    Promise.all([
      h.runCmd(curl({number: +publicPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
      h.runCmd(curl({number: +hiddenPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
    ]).then(done);
  });

});

});

describe(`${host}/*`, () => {

  it('should respond with 404 for requests to unknown URLs', done => {
    const bodyRegex = /^Unknown resource/;

    Promise.all([
      h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(404, bodyRegex)),
    ]).then(done);
  });

});

});