Merge remote-tracking branch 'en/master' into aio
# Conflicts:
#	aio/content/guide/ajs-quick-reference.md
#	aio/content/guide/animations.md
#	aio/content/guide/aot-compiler.md
#	aio/content/guide/architecture.md
#	aio/content/guide/attribute-directives.md
#	aio/content/guide/bootstrapping.md
#	aio/content/guide/browser-support.md
#	aio/content/guide/cb-index.md
#	aio/content/guide/change-log.md
#	aio/content/guide/cheatsheet.md
#	aio/content/guide/cli-quickstart.md
#	aio/content/guide/component-interaction.md
#	aio/content/guide/component-styles.md
#	aio/content/guide/dependency-injection-in-action.md
#	aio/content/guide/dependency-injection.md
#	aio/content/guide/deployment.md
#	aio/content/guide/displaying-data.md
#	aio/content/guide/dynamic-component-loader.md
#	aio/content/guide/dynamic-form.md
#	aio/content/guide/form-validation.md
#	aio/content/guide/forms.md
#	aio/content/guide/glossary.md
#	aio/content/guide/hierarchical-dependency-injection.md
#	aio/content/guide/i18n.md
#	aio/content/guide/index.md
#	aio/content/guide/learning-angular.md
#	aio/content/guide/lifecycle-hooks.md
#	aio/content/guide/ngmodule-faq.md
#	aio/content/guide/ngmodule.md
#	aio/content/guide/npm-packages.md
#	aio/content/guide/pipes.md
#	aio/content/guide/quickstart.md
#	aio/content/guide/reactive-forms.md
#	aio/content/guide/router.md
#	aio/content/guide/security.md
#	aio/content/guide/server-communication.md
#	aio/content/guide/set-document-title.md
#	aio/content/guide/setup-systemjs-anatomy.md
#	aio/content/guide/setup.md
#	aio/content/guide/structural-directives.md
#	aio/content/guide/styleguide.md
#	aio/content/guide/template-syntax.md
#	aio/content/guide/testing.md
#	aio/content/guide/ts-to-js.md
#	aio/content/guide/typescript-configuration.md
#	aio/content/guide/upgrade.md
#	aio/content/guide/user-input.md
#	aio/content/guide/visual-studio-2015.md
#	aio/content/guide/webpack.md
#	aio/content/navigation.json
#	aio/content/tutorial/index.md
#	aio/content/tutorial/toh-pt1.md
#	aio/content/tutorial/toh-pt2.md
#	aio/content/tutorial/toh-pt3.md
#	aio/content/tutorial/toh-pt4.md
#	aio/content/tutorial/toh-pt5.md
#	aio/content/tutorial/toh-pt6.md
#	aio/package.json
#	aio/src/styles/main.scss
#	aio/transforms/angular.io-package/index.js
Commit f2059b445c
.bazelrc  (new file, 6 lines)
@@ -0,0 +1,6 @@
+# Disable sandboxing because it's too slow.
+# https://github.com/bazelbuild/bazel/issues/2424
+build --spawn_strategy=standalone
+
+# Performance: avoid stat'ing input files
+build --watchfs
.circleci/config.yml  (new file, 56 lines)
@@ -0,0 +1,56 @@
+# Configuration file for https://circleci.com/gh/angular/angular
+
+# Note: YAML anchors allow an object to be re-used, reducing duplication.
+# The ampersand declares an alias for an object, then later the `<<: *name`
+# syntax dereferences it.
+# See http://blog.daemonl.com/2016/02/yaml.html
+# To validate changes, use an online parser, eg.
+# http://yaml-online-parser.appspot.com/
+
+# Settings common to each job
+anchor_1: &job_defaults
+  working_directory: ~/ng
+  docker:
+    - image: angular/ngcontainer
+
+# After checkout, rebase on top of master.
+# Similar to travis behavior, but not quite the same.
+# See https://discuss.circleci.com/t/1662
+anchor_2: &post_checkout
+  post: git pull --ff-only origin "refs/pull/${CI_PULL_REQUEST//*pull\//}/merge"
+
+version: 2
+jobs:
+  lint:
+    <<: *job_defaults
+    steps:
+      - checkout:
+          <<: *post_checkout
+      - restore_cache:
+          key: angular-{{ .Branch }}-{{ checksum "npm-shrinkwrap.json" }}
+
+      - run: npm install
+      - run: npm run postinstall
+      - run: ./node_modules/.bin/gulp lint
+
+  build:
+    <<: *job_defaults
+    steps:
+      - checkout:
+          <<: *post_checkout
+      - restore_cache:
+          key: angular-{{ .Branch }}-{{ checksum "npm-shrinkwrap.json" }}
+
+      - run: bazel run @build_bazel_rules_typescript_node//:bin/npm install
+      - run: bazel build packages/...
+      - save_cache:
+          key: angular-{{ .Branch }}-{{ checksum "npm-shrinkwrap.json" }}
+          paths:
+            - "node_modules"
+
+workflows:
+  version: 2
+  default_workflow:
+    jobs:
+      - lint
+      - build
.github/ISSUE_TEMPLATE.md  (64 changed lines)
@@ -1,39 +1,57 @@
 <!--
-IF YOU DON'T FILL OUT THE FOLLOWING INFORMATION WE MIGHT CLOSE YOUR ISSUE WITHOUT INVESTIGATING
+PLEASE HELP US PROCESS GITHUB ISSUES FASTER BY PROVIDING THE FOLLOWING INFORMATION.
+
+ISSUES MISSING IMPORTANT INFORMATION MAY BE CLOSED WITHOUT INVESTIGATION.
 -->

-**I'm submitting a ...** (check one with "x")
-```
-[ ] bug report => search github for a similar issue or PR before submitting
-[ ] feature request
-[ ] support request => Please do not submit support request here, instead see https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question
-```
+## I'm submitting a...
+<!-- Check one of the following options with "x" -->
+<pre><code>
+[ ] Regression (a behavior that used to work and stopped working in a new release)
+[ ] Bug report <!-- Please search GitHub for a similar issue or PR before submitting -->
+[ ] Feature request
+[ ] Documentation issue or request
+[ ] Support request => Please do not submit support request here, instead see https://github.com/angular/angular/blob/master/CONTRIBUTING.md#question
+</code></pre>

-**Current behavior**
-<!-- Describe how the bug manifests. -->
+## Current behavior
+<!-- Describe how the issue manifests. -->

-**Expected behavior**
-<!-- Describe what the behavior would be without the bug. -->
+## Expected behavior
+<!-- Describe what the desired behavior would be. -->


-**Minimal reproduction of the problem with instructions**
+## Minimal reproduction of the problem with instructions
 <!--
-If the current behavior is a bug or you can illustrate your feature request better with an example,
-please provide the *STEPS TO REPRODUCE* and if possible a *MINIMAL DEMO* of the problem via
+For bug reports please provide the *STEPS TO REPRODUCE* and if possible a *MINIMAL DEMO* of the problem via
 https://plnkr.co or similar (you can use this template as a starting point: http://plnkr.co/edit/tpl:AvJOMERrnz94ekVua0u5).
 -->

-**What is the motivation / use case for changing the behavior?**
-<!-- Describe the motivation or the concrete use case -->
+## What is the motivation / use case for changing the behavior?
+<!-- Describe the motivation or the concrete use case. -->

-**Please tell us about your environment:**
-<!-- Operating system, IDE, package manager, HTTP server, ... -->

-* **Angular version:** 2.0.X
+## Environment
+
+<pre><code>
+Angular version: X.Y.Z
 <!-- Check whether this is still an issue in the most recent Angular version -->

-* **Browser:** [all | Chrome XX | Firefox XX | IE XX | Safari XX | Mobile Chrome XX | Android X.X Web Browser | iOS XX Safari | iOS XX UIWebView | iOS XX WKWebView ]
-<!-- All browsers where this could be reproduced -->
+Browser:
+- [ ] Chrome (desktop) version XX
+- [ ] Chrome (Android) version XX
+- [ ] Chrome (iOS) version XX
+- [ ] Firefox version XX
+- [ ] Safari (desktop) version XX
+- [ ] Safari (iOS) version XX
+- [ ] IE version XX
+- [ ] Edge version XX

-* **Language:** [all | TypeScript X.X | ES6/7 | ES5]
+For Tooling issues:
+- Node version: XX <!-- run `node --version` -->
+- Platform: <!-- Mac, Linux, Windows -->

-* **Node (for AoT issues):** `node --version` =
+Others:
+<!-- Anything else relevant? Operating system version, IDE, package manager, HTTP server, ... -->
+</code></pre>
.github/PULL_REQUEST_TEMPLATE.md  (27 changed lines)
@@ -1,10 +1,15 @@
-**Please check if the PR fulfills these requirements**
+## PR Checklist
+Please check if your PR fulfills the following requirements:
+
 - [ ] The commit message follows our guidelines: https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit
 - [ ] Tests for the changes have been added (for bug fixes / features)
 - [ ] Docs have been added / updated (for bug fixes / features)


-**What kind of change does this PR introduce?** (check one with "x")
+## PR Type
+What kind of change does this PR introduce?
+
+<!-- Please check the one that applies to this PR using "x". -->
 ```
 [ ] Bugfix
 [ ] Feature
@@ -12,25 +17,27 @@
 [ ] Refactoring (no functional changes, no api changes)
 [ ] Build related changes
 [ ] CI related changes
+[ ] Documentation content changes
+[ ] angular.io application / infrastructure changes
 [ ] Other... Please describe:
 ```

-**What is the current behavior?** (You can also link to an open issue here)
+## What is the current behavior?
+<!-- Please describe the current behavior that you are modifying, or link to a relevant issue. -->
+
+Issue Number: N/A


-**What is the new behavior?**
+## What is the new behavior?


-**Does this PR introduce a breaking change?** (check one with "x")
+## Does this PR introduce a breaking change?
 ```
 [ ] Yes
 [ ] No
 ```

-If this PR contains a breaking change, please describe the impact and migration path for existing applications: ...
+<!-- If this PR contains a breaking change, please describe the impact and migration path for existing applications below. -->


-**Other information**:
+## Other information
.gitignore  (2 changed lines)
@@ -1,6 +1,8 @@
 .DS_STORE

 /dist/
+bazel-*
+e2e_test.*
 node_modules
 bower_components

@@ -8,9 +8,11 @@
 # alexeagle - Alex Eagle
 # alxhub - Alex Rickabaugh
 # chuckjaz - Chuck Jazdzewski
+# Foxandxss - Jesús Rodríguez
 # gkalpak - George Kalpakas
 # IgorMinar - Igor Minar
 # jasonaden - Jason Aden
+# juleskremer - Jules Kremer
 # kara - Kara Erickson
 # matsko - Matias Niemelä
 # mhevery - Misko Hevery
@@ -18,10 +20,11 @@
 # pkozlowski-opensource - Pawel Kozlowski
 # robwormald - Rob Wormald
 # tbosch - Tobias Bosch
+# tinayuangao - Tina Gao
 # vicb - Victor Berchet
 # vikerman - Vikram Subramanian
 # wardbell - Ward Bell
-# tinayuangao - Tina Gao

 version: 2

@@ -66,8 +69,8 @@ groups:
        - "*.lock"
        - "tools/*"
      exclude:
-       - "tools/@angular/tsc-wrapped/*"
        - "tools/public_api_guard/*"
+       - "tools/ngc-wrapped/*"
        - "aio/*"
    users:
      - IgorMinar #primary
@@ -93,19 +96,21 @@ groups:
        - "packages/core/*"
    users:
      - tbosch #primary
+     - chuckjaz
      - mhevery
      - vicb
      - IgorMinar #fallback

-  compiler/animations:
+  animations:
    conditions:
      files:
-       - "packages/compiler/src/animation/*"
+       - "packages/animation/*"
+       - "packages/platform-browser/animations/*"
    users:
      - matsko #primary
-     - tbosch
-     - IgorMinar #fallback
+     - chuckjaz #fallback
      - mhevery #fallback
+     - IgorMinar #fallback

  compiler/i18n:
    conditions:
@@ -131,11 +136,13 @@ groups:
  compiler-cli:
    conditions:
      files:
-       - "tools/@angular/tsc-wrapped/*"
+       - "packages/tsc-wrapped/*"
        - "packages/compiler-cli/*"
+       - "tools/ngc-wrapped/*"
    users:
      - alexeagle
      - chuckjaz
+     - vicb
      - tbosch
      - IgorMinar #fallback
      - mhevery #fallback
@@ -248,10 +255,46 @@ groups:
  angular.io:
    conditions:
      files:
-       - "aio/*"
+       include:
+         - "aio/*"
+       exclude:
+         - "aio/content/*"
    users:
-     - IgorMinar #primary
-     - petebacondarwin #secondary
+     - petebacondarwin #primary
+     - IgorMinar
      - gkalpak
-     - wardbell
+     - mhevery #fallback
+
+  angular.io-guide-and-tutorial:
+    conditions:
+      files:
+        include:
+          - "aio/content/*"
+        exclude:
+          - "aio/content/marketing/*"
+          - "aio/content/navigation.json"
+          - "aio/content/license.md"
+    users:
+      - juleskremer #primary
+      - Foxandxss
+      - stephenfluin
+      - wardbell
+      - petebacondarwin
+      - gkalpak
+      - IgorMinar #fallback
+      - mhevery #fallback
+
+  angular.io-marketing:
+    conditions:
+      files:
+        include:
+          - "aio/content/marketing/*"
+          - "aio/content/navigation.json"
+          - "aio/content/license.md"
+    users:
+      - juleskremer #primary
+      - stephenfluin
+      - petebacondarwin
+      - gkalpak
+      - IgorMinar #fallback
      - mhevery #fallback
.travis.yml  (18 changed lines)
@@ -1,9 +1,12 @@
 language: node_js
 sudo: false
+# force trusty as Google Chrome addon is not supported on Precise
+dist: trusty
 node_js:
   - '6.9.5'

 addons:
+  chrome: stable
   # firefox: "38.0"
   apt:
     sources:
@@ -32,11 +35,15 @@ env:
   global:
     # GITHUB_TOKEN_ANGULAR=<github token, a personal access token of the angular-builds account, account access in valentine>
    # This is needed for the e2e Travis matrix task to publish packages to github for continuous packages delivery.
-    - secure: "rNqXoy2gqjbF5tBXlRBy+oiYntO3BtzcxZuEtlLMzNaTNzC4dyMOFub0GkzIPWwOzkARoEU9Kv+bC97fDVbCBUKeyzzEqxqddUKhzRxeaYjsefJ6XeTvBvDxwo7wDwyxZSuWdBeGAe4eARVHm7ypsd+AlvqxtzjyS27TK2BzdL4="
+    - secure: "aCdHveZuY8AT4Jr1JoJB4LxZsnGWRe/KseZh1YXYe5UtufFCtTVHvUcLn0j2aLBF0KpdyS+hWf0i4np9jthKu2xPKriefoPgCMpisYeC0MFkwbmv+XlgkUbgkgVZMGiVyX7DCYXVahxIoOUjVMEDCbNiHTIrfEuyq24U3ok2tHc="
     # FIREBASE_TOKEN
-    # This is needed for publishing builds to the "aio-staging" firebase site.
-    # TODO(i): the token was generated using the iminar@google account, we should switch to a shared/role-base account.
-    - secure: "MPx3UM77o5IlhT75PKHL0FXoB5tSXDc3vnCXCd1sRy4XUTZ9vjcV6nNuyqEf+SOw659bGbC1FI4mACGx1Q+z7MQDR85b1mcA9uSgHDkh+IR82CnCVdaX9d1RXafdJIArahxfmorbiiPPLyPIKggo7ituRm+2c+iraoCkE/pXxYg="
+    # This is needed for publishing builds to the "aio-staging" and "angular-io" firebase projects.
+    # This token was generated using the aio-deploy@angular.io account using `firebase login:ci` and password from valentine
+    - secure: "L5CyQmpwWtoR4Qi4xlWQh/cL1M6ZeJL4W4QAr4HdKFMgYt9h+Whqkymyh2NxwmCbPvWa7yUd+OiLQUDCY7L2VIg16hTwoe2CgYDyQA0BEwLzxtRrJXl93TfwMlrUx5JSIzAccD6D4sjtz8kSFMomK2Nls33xOXOukwyhVMjd0Cg="
+    # ANGULAR_PAYLOAD_FIREBASE_TOKEN
+    # This is for payload size data to "angular-payload-size" firebase project
+    # This token was generated using the payload@angular.io account using `firebase login:ci` and password from valentine
+    - secure: "SxotP/ymNy6uWAVbfwM9BlwETPEBpkRvU/F7fCtQDDic99WfQHzzUSQqHTk8eKk3GrGAOSL09vT0WfStQYEIGEoS5UHWNgOnelxhw+d5EnaoB8vQ0dKQBTK092hQg4feFprr+B/tCasyMV6mVwpUzZMbIJNn/Rx7H5g1bp+Gkfg="
   matrix:
     # Order: a slower build first, so that we don't occupy an idle travis worker waiting for others to complete.
     - CI_MODE=e2e
@@ -48,6 +55,9 @@ env:
     - CI_MODE=browserstack_optional
     - CI_MODE=docs_test
     - CI_MODE=aio
+    - CI_MODE=aio_e2e AIO_SHARD=0
+    - CI_MODE=aio_e2e AIO_SHARD=1
+    - CI_MODE=bazel

 matrix:
   fast_finish: true
BUILD.bazel  (new file, 25 lines)
@@ -0,0 +1,25 @@
+package(default_visibility = ["//visibility:public"])
+exports_files(["tsconfig.json"])
+
+# This rule belongs in node_modules/BUILD
+# It's here as a workaround for
+# https://github.com/bazelbuild/bazel/issues/374#issuecomment-296217940
+filegroup(
+    name = "node_modules",
+    srcs = glob([
+        # Performance workaround: list individual files
+        # This won't scale in the general case.
+        # TODO(alexeagle): figure out what to do
+        "node_modules/typescript/**",
+        "node_modules/zone.js/**",
+        "node_modules/rxjs/**/*.d.ts",
+        "node_modules/rxjs/**/*.js",
+        "node_modules/@types/**/*.d.ts",
+        "node_modules/tsickle/**",
+        "node_modules/hammerjs/**/*.d.ts",
+        "node_modules/protobufjs/**",
+        "node_modules/bytebuffer/**",
+        "node_modules/reflect-metadata/**",
+        "node_modules/minimist/**/*.js",
+    ]),
+)
CHANGELOG.md  (4662 changed lines)
File diff suppressed because it is too large.
@@ -17,15 +17,15 @@ Help us keep Angular open and inclusive. Please read and follow our [Code of Con

 ## <a name="question"></a> Got a Question or Problem?

-Please, do not open issues for the general support questions as we want to keep GitHub issues for bug reports and feature requests. You've got much better chances of getting your question answered on [StackOverflow](https://stackoverflow.com/questions/tagged/angular) where the questions should be tagged with tag `angular`.
+Do not open issues for general support questions as we want to keep GitHub issues for bug reports and feature requests. You've got much better chances of getting your question answered on [Stack Overflow](https://stackoverflow.com/questions/tagged/angular) where the questions should be tagged with tag `angular`.

-StackOverflow is a much better place to ask questions since:
+Stack Overflow is a much better place to ask questions since:

-- there are thousands of people willing to help on StackOverflow
+- there are thousands of people willing to help on Stack Overflow
 - questions and answers stay available for public viewing so your question / answer might help someone else
-- StackOverflow's voting system assures that the best answers are prominently visible.
+- Stack Overflow's voting system assures that the best answers are prominently visible.

-To save your and our time we will be systematically closing all the issues that are requests for general support and redirecting people to StackOverflow.
+To save your and our time, we will systematically close all issues that are requests for general support and redirect people to Stack Overflow.

 If you would like to chat about the question in real-time, you can reach out via [our gitter channel][gitter].

@@ -198,8 +198,7 @@ Must be one of the following:
 * **fix**: A bug fix
 * **perf**: A code change that improves performance
 * **refactor**: A code change that neither fixes a bug nor adds a feature
-* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing
-semi-colons, etc)
+* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
 * **test**: Adding missing tests or correcting existing tests

 ### Scope
@@ -207,6 +206,7 @@ The scope should be the name of the npm package affected (as perceived by person

 The following is the list of supported scopes:

+* **animations**
 * **common**
 * **compiler**
 * **compiler-cli**
@@ -223,7 +223,7 @@ The following is the list of supported scopes:
 * **upgrade**
 * **tsc-wrapped**

-There is currently few exception to the "use package name" rule:
+There are currently a few exceptions to the "use package name" rule:

 * **packaging**: used for changes that change the npm package layout in all of our packages, e.g. public path changes, package.json changes done to all packages, d.ts file/format changes, changes to bundles, etc.
 * **changelog**: used for updating the release notes in CHANGELOG.md
README.md  (11 changed lines)
@@ -3,22 +3,21 @@
 [](https://gitter.im/angular/angular?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 [](http://issuestats.com/github/angular/angular)
 [](http://issuestats.com/github/angular/angular)
-[](https://badge.fury.io/js/%40angular%2Fcore)
+[](https://www.npmjs.com/@angular/core)


 [](https://saucelabs.com/u/angular2-ci)

 *Safari (7+), iOS (7+), Edge (14) and IE mobile (11) are tested on [BrowserStack][browserstack].*

-Angular
-=========
+# Angular

-Angular is a development platform for building mobile and desktop web applications using Typescript/JavaScript (JS) and other languages.
+Angular is a development platform for building mobile and desktop web applications using Typescript/JavaScript and other languages.

 ## Quickstart

 [Get started in 5 minutes][quickstart].


 ## Want to help?

 Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our
WORKSPACE  (new file, 17 lines)
@@ -0,0 +1,17 @@
+load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
+
+git_repository(
+    name = "build_bazel_rules_typescript",
+    remote = "https://github.com/bazelbuild/rules_typescript.git",
+    tag = "0.0.5",
+)
+
+load("@build_bazel_rules_typescript//:defs.bzl", "node_repositories")
+
+node_repositories(package_json = "//:package.json")
+
+git_repository(
+    name = "build_bazel_rules_angular",
+    remote = "https://github.com/bazelbuild/rules_angular.git",
+    tag = "0.0.1",
+)
@@ -9,10 +9,11 @@
       "outDir": "dist",
       "assets": [
         "assets",
-        "content",
+        "generated",
         "app/search/search-worker.js",
         "favicon.ico",
-        "pwa-manifest.json"
+        "pwa-manifest.json",
+        "google385281288605d160.html"
       ],
       "index": "index.html",
       "main": "main.ts",
@@ -21,16 +22,16 @@
       "tsconfig": "tsconfig.app.json",
       "testTsconfig": "tsconfig.spec.json",
       "prefix": "aio",
-      "serviceWorker": true,
+      "serviceWorker": false,
       "styles": [
         "styles.scss"
       ],
       "scripts": [

       ],
       "environmentSource": "environments/environment.ts",
       "environments": {
         "dev": "environments/environment.ts",
+        "stage": "environments/environment.stage.ts",
         "prod": "environments/environment.prod.ts"
       }
     }
@@ -1,5 +0,0 @@  (file deleted)
-{
-  "projects": {
-    "staging": "aio-staging"
-  }
-}
aio/.gitignore  (6 changed lines)
@@ -3,7 +3,7 @@
 # compiled output
 /dist
 /out-tsc
-/src/content
+/src/generated
 /tmp

 # dependencies
@@ -38,7 +38,11 @@ yarn-error.log
 # e2e
 /e2e/*.js
 /e2e/*.map
+protractor-results*.txt

 # System Files
 .DS_Store
 Thumbs.db
+
+# copied dependencies
+src/assets/js/lunr*
aio/README.md  (116 changed lines)
@@ -1,31 +1,113 @@
-# Site
+# Angular documentation project (https://angular.io)

-This project was generated with [angular-cli](https://github.com/angular/angular-cli) version 1.0.0-beta.26.
+Everything in this folder is part of the documentation project. This includes

-## Development server
-Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.
+* the web site for displaying the documentation
+* the dgeni configuration for converting source files to rendered files that can be viewed in the web site.
+* the tooling for setting up examples for development; and generating plunkers and zip files from the examples.

-## Code scaffolding
+## Developer tasks

-Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive/pipe/service/class/module`.
+We use `yarn` to manage the dependencies and to run build tasks.
+You should run all these tasks from the `angular/aio` folder.
+Here are the most important tasks you might need to use:

-## Build
+* `yarn` - install all the dependencies.
+* `yarn setup` - Install all the dependencies, boilerplate, plunkers, zips and runs dgeni on the docs.

-Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--prod` flag for a production build.
+* `yarn start` - run a development web server that watches the files; then builds the doc-viewer and reloads the page, as necessary.
+* `yarn serve-and-sync` - run both the `docs-watch` and `start` in the same console.
+* `yarn lint` - check that the doc-viewer code follows our style rules.
+* `yarn test` - watch all the source files, for the doc-viewer, and run all the unit tests when any change.
+* `yarn e2e` - run all the e2e tests for the doc-viewer.

-## Running unit tests
+* `yarn docs` - generate all the docs from the source files.
+* `yarn docs-watch` - watch the Angular source and the docs files and run a short-circuited doc-gen for the docs that changed.
+* `yarn docs-lint` - check that the doc gen code follows our style rules.
+* `yarn docs-test` - run the unit tests for the doc generation code.

-Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).
+* `yarn boilerplate:add` - generate all the boilerplate code for the examples, so that they can be run locally.
+* `yarn boilerplate:remove` - remove all the boilerplate code that was added via `yarn boilerplate:add`.
+* `yarn generate-plunkers` - generate the plunker files that are used by the `live-example` tags in the docs.
+* `yarn generate-zips` - generate the zip files from the examples. Zip available via the `live-example` tags in the docs.

-## Running end-to-end tests
+* `yarn example-e2e` - run all e2e tests for examples
+  - `yarn example-e2e -- --setup` - force webdriver update & other setup, then run tests
+  - `yarn example-e2e -- --filter=foo` - limit e2e tests to those containing the word "foo"

-Run `ng e2e` to execute the end-to-end tests via [Protractor](http://www.protractortest.org/).
-Before running the tests make sure you are serving the app via `ng serve`.
+* `yarn build-ie-polyfills` - generates a js file of polyfills that can be loaded in Internet Explorer.

-## Deploying to GitHub Pages
+## Using ServiceWorker locally

-Run `ng github-pages:deploy` to deploy to GitHub Pages.
+Since abb36e3cb, running `yarn start -- --prod` will no longer set up the ServiceWorker, which
+would require manually running `yarn sw-manifest` and `yarn sw-copy` (something that is not possible
+with webpack serving the files from memory).

-## Further help
+If you want to test ServiceWorker locally, you can use `yarn build` and serve the files in `dist/`
+with `yarn http-server -- dist -p 4200`.

-To get more help on the `angular-cli` use `ng help` or go check out the [Angular-CLI README](https://github.com/angular/angular-cli/blob/master/README.md).
+For more details see #16745.
+
+
+## Guide to authoring
+
+There are two types of content in the documentation:
+
+* **API docs**: descriptions of the modules, classes, interfaces, decorators, etc that make up the Angular platform.
+  API docs are generated directly from the source code.
+  The source code is contained in TypeScript files, located in the `angular/packages` folder.
+  Each API item may have a preceding comment, which contains JSDoc style tags and content.
+  The content is written in markdown.
+
+* **Other content**: guides, tutorials, and other marketing material.
+  All other content is written using markdown in text files, located in the `angular/aio/content` folder.
+  More specifically, there are sub-folders that contain particular types of content: guides, tutorial and marketing.
+
+We use the [dgeni](https://github.com/angular/dgeni) tool to convert these files into docs that can be viewed in the doc-viewer.
+
+The [Authors Style Guide](https://angular.io/guide/docs-style-guide) prescribes guidelines for
+writing guide pages, explains how to use the documentation classes and components, and how to markup sample source code to produce code snippets.
+
+### Generating the complete docs
+
+The main task for generating the docs is `yarn docs`. This will process all the source files (API and other),
+extracting the documentation and generating JSON files that can be consumed by the doc-viewer.
+
+### Partial doc generation for editors
+
+Full doc generation can take up to one minute. That's too slow for efficient document creation and editing.
+
+You can make small changes in a smart editor that displays formatted markdown:
+>In VS Code, _Cmd-K, V_ opens markdown preview in side pane; _Cmd-B_ toggles left sidebar
+
+You also want to see those changes displayed properly in the doc viewer
+with a quick, edit/view cycle time.
+
+For this purpose, use the `yarn docs-watch` task, which watches for changes to source files and only
+re-processes the files necessary to generate the docs that are related to the file that has changed.
+Since this task takes shortcuts, it is much faster (often less than 1 second) but it won't produce full
+fidelity content. For example, links to other docs and code examples may not render correctly. This is
+most particularly noticed in links to other docs and in the embedded examples, which may not always render
+correctly.
+
+The general setup is as follows:
+
+* Open a terminal, ensure the dependencies are installed; run an initial doc generation; then start the doc-viewer:
+
+```bash
+yarn
+yarn docs
+yarn start
+```
+
+* Open a second terminal and start watching the docs
+
+```bash
+yarn docs-watch
+```
+
+* Open a browser at https://localhost:4200/ and navigate to the document on which you want to work.
+  You can automatically open the browser by using `yarn start -- -o` in the first terminal.
+
+* Make changes to the page's associated doc or example files. Every time a file is saved, the doc will
+  be regenerated, the app will rebuild and the page will reload.
@@ -29,6 +29,8 @@ ARG AIO_NGINX_PORT_HTTPS=443
 ARG TEST_AIO_NGINX_PORT_HTTPS=4433
 ARG AIO_REPO_SLUG=angular/angular
 ARG TEST_AIO_REPO_SLUG=test-repo/test-slug
+ARG AIO_TRUSTED_PR_LABEL="aio: preview"
+ARG TEST_AIO_TRUSTED_PR_LABEL="aio: preview"
 ARG AIO_UPLOAD_HOSTNAME=upload.localhost
 ARG TEST_AIO_UPLOAD_HOSTNAME=upload.localhost
 ARG AIO_UPLOAD_MAX_SIZE=20971520
@@ -48,9 +50,11 @@ ENV AIO_BUILDS_DIR=$AIO_BUILDS_DIR TEST_AIO_BUILDS_DIR=$TEST
     AIO_REPO_SLUG=$AIO_REPO_SLUG TEST_AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG \
     AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
     AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
+    AIO_TRUSTED_PR_LABEL=$AIO_TRUSTED_PR_LABEL TEST_AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL \
     AIO_UPLOAD_HOSTNAME=$AIO_UPLOAD_HOSTNAME TEST_AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME \
     AIO_UPLOAD_MAX_SIZE=$AIO_UPLOAD_MAX_SIZE TEST_AIO_UPLOAD_MAX_SIZE=$TEST_AIO_UPLOAD_MAX_SIZE \
     AIO_UPLOAD_PORT=$AIO_UPLOAD_PORT TEST_AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT \
+    AIO_WWW_USER=www-data \
     NODE_ENV=production


@@ -63,6 +67,7 @@ RUN apt-get update -y && apt-get install -y curl
 RUN curl --silent --show-error --location https://deb.nodesource.com/setup_6.x | bash -
 RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
 RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
+RUN echo "deb http://ftp.debian.org/debian jessie-backports main" | tee /etc/apt/sources.list.d/backports.list


 # Install packages
@@ -71,14 +76,19 @@ RUN apt-get update -y && apt-get install -y \
     cron \
     dnsmasq \
     nano \
-    nginx \
     nodejs \
     openssl \
     rsyslog \
     yarn
+RUN apt-get install -t jessie-backports -y nginx
 RUN yarn global add pm2@2


+# Set up log rotation
+COPY logrotate/* /etc/logrotate.d/
+RUN chmod 0644 /etc/logrotate.d/*
+
+
 # Set up cronjobs
 COPY cronjobs/aio-builds-cleanup /etc/cron.d/
 RUN chmod 0744 /etc/cron.d/aio-builds-cleanup
@@ -104,31 +114,31 @@ RUN update-ca-certificates


 # Set up nginx (for production and testing)
-RUN rm /etc/nginx/sites-enabled/*
+RUN sed -i -E "s|^user\s+\S+;|user $AIO_WWW_USER;|" /etc/nginx/nginx.conf
+RUN rm -f /etc/nginx/conf.d/*
+RUN rm -f /etc/nginx/sites-enabled/*

-COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$AIO_DOMAIN_NAME|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|g" /etc/nginx/sites-available/aio-builds-prod.conf
-RUN ln -s /etc/nginx/sites-available/aio-builds-prod.conf /etc/nginx/sites-enabled/aio-builds-prod.conf
+COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-prod.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-prod.conf

-COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$TEST_AIO_DOMAIN_NAME|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|g" /etc/nginx/sites-available/aio-builds-test.conf
-RUN ln -s /etc/nginx/sites-available/aio-builds-test.conf /etc/nginx/sites-enabled/aio-builds-test.conf
+COPY nginx/aio-builds.conf /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_DOMAIN_NAME}}|$TEST_AIO_DOMAIN_NAME|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_LOCALCERTS_DIR}}|$TEST_AIO_LOCALCERTS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_NGINX_LOGS_DIR}}|$TEST_AIO_NGINX_LOGS_DIR|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTPS}}|$TEST_AIO_NGINX_PORT_HTTPS|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|g" /etc/nginx/conf.d/aio-builds-test.conf
+RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|g" /etc/nginx/conf.d/aio-builds-test.conf


 # Set up pm2
aio/aio-builds-setup/dockerbuild/logrotate/aio-misc  (new file, 9 lines)
@@ -0,0 +1,9 @@
+/var/log/aio/clean-up.log /var/log/aio/init.log /var/log/aio/verify-setup.log {
+    compress
+    create
+    delaycompress
+    missingok
+    monthly
+    notifempty
+    rotate 6
+}
aio/aio-builds-setup/dockerbuild/logrotate/aio-nginx  (new file, 13 lines)
@@ -0,0 +1,13 @@
+/var/log/aio/nginx/*.log /var/log/aio/nginx-test/*.log {
+    compress
+    create
+    delaycompress
+    missingok
+    monthly
+    notifempty
+    rotate 6
+    sharedscripts
+    postrotate
+        service nginx rotate >/dev/null 2>&1
+    endscript
+}
@@ -0,0 +1,9 @@  (new file)
+/var/log/aio/upload-server-*.log {
+    compress
+    copytruncate
+    delaycompress
+    missingok
+    monthly
+    notifempty
+    rotate 6
+}
@@ -15,18 +15,24 @@ server {

 # Serve PR-preview requests
 server {
-    server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{40})\.";
+    server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{7,40})\.";

-    listen {{$AIO_NGINX_PORT_HTTPS}} ssl;
-    listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl;
+    listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2;
+    listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;

     ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
     ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
+    ssl_prefer_server_ciphers on;
+    ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;

     root {{$AIO_BUILDS_DIR}}/$pr/$sha;
     disable_symlinks on from=$document_root;
     index index.html;

+    gzip on;
+    gzip_comp_level 7;
+    gzip_types *;
+
     access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
     error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;

@@ -43,11 +49,13 @@ server {
 server {
     server_name _;

-    listen {{$AIO_NGINX_PORT_HTTPS}} ssl default_server;
-    listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl;
+    listen {{$AIO_NGINX_PORT_HTTPS}} ssl http2 default_server;
+    listen [::]:{{$AIO_NGINX_PORT_HTTPS}} ssl http2;

     ssl_certificate {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.crt;
     ssl_certificate_key {{$AIO_LOCALCERTS_DIR}}/{{$AIO_DOMAIN_NAME}}.key;
+    ssl_prefer_server_ciphers on;
+    ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;

     access_log {{$AIO_NGINX_LOGS_DIR}}/access.log;
     error_log {{$AIO_NGINX_LOGS_DIR}}/error.log;
@@ -80,6 +88,21 @@ server {
         resolver 127.0.0.1;
     }

+    # Notify about PR changes
+    location "~^/pr-updated/?$" {
+        if ($request_method != "POST") {
+            add_header Allow "POST";
+            return 405;
+        }
+
+        proxy_pass_request_headers on;
+        proxy_redirect off;
+        proxy_method POST;
+        proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;
+
+        resolver 127.0.0.1;
+    }
+
     # Everything else
     location / {
         return 404;
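To make the changed `server_name` pattern above concrete (it now accepts 7 to 40 hex characters instead of exactly 40), here is a hedged TypeScript sketch that applies an equivalent regular expression to example host names; the host names and helper function are illustrative only, not part of this commit:

```ts
// Equivalent of the nginx pattern: pr<number>-<7..40 hex chars> followed by a dot.
const PREVIEW_HOST_RE = /^pr([1-9][0-9]*)-([0-9a-f]{7,40})\./;

function parsePreviewHost(host: string): {pr: number; sha: string} | null {
  const match = PREVIEW_HOST_RE.exec(host);
  return match ? {pr: +match[1], sha: match[2]} : null;
}

// A short (7-char) SHA is now enough; non-hex "SHAs" are still rejected.
console.log(parsePreviewHost('pr1234-4b825dc.example.test'));   // { pr: 1234, sha: '4b825dc' }
console.log(parsePreviewHost('pr1234-notahex.example.test'));   // null
```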
@@ -2,6 +2,7 @@
 import * as fs from 'fs';
 import * as path from 'path';
 import * as shell from 'shelljs';
+import {HIDDEN_DIR_PREFIX} from '../common/constants';
 import {GithubPullRequests} from '../common/github-pull-requests';
 import {assertNotMissingOrEmpty} from '../common/utils';

@@ -31,8 +32,9 @@ export class BuildCleaner {
       }

       const buildNumbers = files.
-        map(Number).      // Convert string to number
-        filter(Boolean);  // Ignore NaN (or 0), because they are not builds
+        map(name => name.replace(HIDDEN_DIR_PREFIX, '')).  // Remove the "hidden dir" prefix
+        map(Number).      // Convert string to number
+        filter(Boolean);  // Ignore NaN (or 0), because they are not builds

       resolve(buildNumbers);
     });
@@ -49,9 +51,11 @@

   protected removeDir(dir: string) {
     try {
-      // Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
-      (shell as any).chmod('-R', 'a+w', dir);
-      shell.rm('-rf', dir);
+      if (shell.test('-d', dir)) {
+        // Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
+        (shell as any).chmod('-R', 'a+w', dir);
+        shell.rm('-rf', dir);
+      }
     } catch (err) {
       console.error(`ERROR: Unable to remove '${dir}' due to:`, err);
     }
@@ -64,8 +68,14 @@
     console.log(`Open pull requests: ${openPrNumbers.length}`);
     console.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);

+    // Try removing public dirs.
     toRemove.
       map(num => path.join(this.buildsDir, String(num))).
       forEach(dir => this.removeDir(dir));
+
+    // Try removing hidden dirs.
+    toRemove.
+      map(num => path.join(this.buildsDir, HIDDEN_DIR_PREFIX + String(num))).
+      forEach(dir => this.removeDir(dir));
   }
 }
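A minimal, self-contained sketch of the directory-name handling introduced above: hidden build directories carry a prefix that is stripped before the name is parsed as a PR number. The input names below are made up for illustration:

```ts
// Prefix matches the constants file added in this commit.
const HIDDEN_DIR_PREFIX = 'hidden--';

// Turn directory names into PR build numbers, ignoring anything that is not a build.
function extractBuildNumbers(dirNames: string[]): number[] {
  return dirNames.
    map(name => name.replace(HIDDEN_DIR_PREFIX, '')).  // remove the "hidden dir" prefix
    map(Number).                                        // convert string to number
    filter(Boolean);                                    // drop NaN (or 0) entries
}

console.log(extractBuildNumbers(['12', 'hidden--34', 'README.md']));  // [ 12, 34 ]
```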
@@ -0,0 +1,3 @@  (new file)
+// Constants
+export const HIDDEN_DIR_PREFIX = 'hidden--';
+export const SHORT_SHA_LEN = 7;
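For orientation, a small hedged sketch of how these two constants are combined later in this commit (a shortened SHA in the preview directory name, and a prefix for hidden, non-public PR directories); the PR number and SHA are example values only:

```ts
// Values mirror the constants file above; the inputs are made-up examples.
const HIDDEN_DIR_PREFIX = 'hidden--';
const SHORT_SHA_LEN = 7;

const pr = '1234';
const sha = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';

const shortSha = sha.substr(0, SHORT_SHA_LEN);      // '4b825dc'
const publicPrDir = pr;                             // '1234'
const hiddenPrDir = `${HIDDEN_DIR_PREFIX}${pr}`;    // 'hidden--1234'
console.log(`${publicPrDir}/${shortSha}`, `${hiddenPrDir}/${shortSha}`);
```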
@@ -63,7 +63,7 @@ export class GithubApi {
         return items;
       }

-      return this.getPaginated(pathname, baseParams, currentPage + 1).then(moreItems => [...items, ...moreItems]);
+      return this.getPaginated<T>(pathname, baseParams, currentPage + 1).then(moreItems => [...items, ...moreItems]);
     });
   }

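The single-character change above (adding `<T>` to the recursive call) keeps the element type from degrading to `any` as pages are concatenated. A self-contained sketch of the same pattern, using a made-up paged data source rather than the actual `GithubApi` class:

```ts
// Made-up paged data source standing in for the GitHub API.
const PAGES: number[][] = [[1, 2, 3], [4, 5], []];
const fetchPage = (page: number): Promise<number[]> => Promise.resolve(PAGES[page] || []);

// Without `<T>` on the recursive call, `moreItems` is inferred as `any[]`
// and `[...items, ...moreItems]` silently loses its element type.
function getPaginated<T>(getPage: (page: number) => Promise<T[]>, currentPage = 0): Promise<T[]> {
  return getPage(currentPage).then(items =>
    items.length === 0
      ? items
      : getPaginated<T>(getPage, currentPage + 1).then(moreItems => [...items, ...moreItems]));
}

getPaginated(fetchPage).then(all => console.log(all));  // [ 1, 2, 3, 4, 5 ]
```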
@@ -6,6 +6,7 @@ import {GithubApi} from './github-api';
 export interface PullRequest {
   number: number;
   user: {login: string};
+  labels: {name: string}[];
 }

 export type PullRequestState = 'all' | 'closed' | 'open';
@@ -30,7 +31,8 @@ export class GithubPullRequests extends GithubApi {
   }

   public fetch(pr: number): Promise<PullRequest> {
-    return this.get<PullRequest>(`/repos/${this.repoSlug}/pulls/${pr}`);
+    // Using the `/issues/` URL, because the `/pulls/` one does not provide labels.
+    return this.get<PullRequest>(`/repos/${this.repoSlug}/issues/${pr}`);
   }

   public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
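The switch to the `/issues/` endpoint is what makes `labels` available on the fetched object; elsewhere in this commit a label (see the `AIO_TRUSTED_PR_LABEL` build argument) is used to decide whether a PR's preview should be public. A hedged sketch of such a label check, with made-up data and a hypothetical helper name:

```ts
interface PullRequest {
  number: number;
  user: {login: string};
  labels: {name: string}[];
}

// Hypothetical helper: true if the PR carries the configured "trusted" label.
function hasTrustedLabel(pr: PullRequest, trustedLabel = 'aio: preview'): boolean {
  return pr.labels.some(label => label.name === trustedLabel);
}

const examplePr: PullRequest = {
  number: 4321,
  user: {login: 'someone'},
  labels: [{name: 'aio: preview'}, {name: 'comp: docs'}],
};
console.log(hasTrustedLabel(examplePr));  // true
```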
@ -4,8 +4,9 @@ import {EventEmitter} from 'events';
|
|||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as shell from 'shelljs';
|
import * as shell from 'shelljs';
|
||||||
|
import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
|
||||||
import {assertNotMissingOrEmpty} from '../common/utils';
|
import {assertNotMissingOrEmpty} from '../common/utils';
|
||||||
import {CreatedBuildEvent} from './build-events';
|
import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
|
||||||
import {UploadError} from './upload-error';
|
import {UploadError} from './upload-error';
|
||||||
|
|
||||||
// Classes
|
// Classes
|
||||||
@@ -17,16 +18,21 @@ export class BuildCreator extends EventEmitter {
   }
 
   // Methods - Public
-  public create(pr: string, sha: string, archivePath: string): Promise<any> {
-    const prDir = path.join(this.buildsDir, pr);
+  public create(pr: string, sha: string, archivePath: string, isPublic: boolean): Promise<void> {
+    // Use only part of the SHA for more readable URLs.
+    sha = sha.substr(0, SHORT_SHA_LEN);
+
+    const {newPrDir: prDir} = this.getCandidatePrDirs(pr, isPublic);
     const shaDir = path.join(prDir, sha);
     let dirToRemoveOnError: string;
 
-    return Promise.
-      all([this.exists(prDir), this.exists(shaDir)]).
+    return Promise.resolve().
+      // If the same PR exists with different visibility, update the visibility first.
+      then(() => this.updatePrVisibility(pr, isPublic)).
+      then(() => Promise.all([this.exists(prDir), this.exists(shaDir)])).
       then(([prDirExisted, shaDirExisted]) => {
         if (shaDirExisted) {
-          throw new UploadError(403, `Request to overwrite existing directory: ${shaDir}`);
+          throw new UploadError(409, `Request to overwrite existing directory: ${shaDir}`);
         }
 
         dirToRemoveOnError = prDirExisted ? shaDir : prDir;
@@ -34,7 +40,8 @@ export class BuildCreator extends EventEmitter {
         return Promise.resolve().
           then(() => shell.mkdir('-p', shaDir)).
           then(() => this.extractArchive(archivePath, shaDir)).
-          then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha)));
+          then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha, isPublic))).
+          then(() => undefined);
       }).
       catch(err => {
         if (dirToRemoveOnError) {
@@ -49,6 +56,36 @@ export class BuildCreator extends EventEmitter {
       });
   }
 
+  public updatePrVisibility(pr: string, makePublic: boolean): Promise<boolean> {
+    const {oldPrDir: otherVisPrDir, newPrDir: targetVisPrDir} = this.getCandidatePrDirs(pr, makePublic);
+
+    return Promise.
+      all([this.exists(otherVisPrDir), this.exists(targetVisPrDir)]).
+      then(([otherVisPrDirExisted, targetVisPrDirExisted]) => {
+        if (!otherVisPrDirExisted) {
+          // No visibility change: Either the visibility is up-to-date or the PR does not exist.
+          return false;
+        } else if (targetVisPrDirExisted) {
+          // Error: Directories for both visibilities exist.
+          throw new UploadError(409, `Request to move '${otherVisPrDir}' to existing directory '${targetVisPrDir}'.`);
+        }
+
+        // Visibility change: Moving `otherVisPrDir` to `targetVisPrDir`.
+        return Promise.resolve().
+          then(() => shell.mv(otherVisPrDir, targetVisPrDir)).
+          then(() => this.listShasByDate(targetVisPrDir)).
+          then(shas => this.emit(ChangedPrVisibilityEvent.type, new ChangedPrVisibilityEvent(+pr, shas, makePublic))).
+          then(() => true);
+      }).
+      catch(err => {
+        if (!(err instanceof UploadError)) {
+          err = new UploadError(500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\n${err}`);
+        }
+
+        throw err;
+      });
+  }
+
   // Methods - Protected
   protected exists(fileOrDir: string): Promise<boolean> {
     return new Promise(resolve => fs.access(fileOrDir, err => resolve(!err)));
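// Illustrative sketch (not part of the commit): how a caller might use `updatePrVisibility()`.
// It resolves with `true` only when a directory was actually moved; `false` means the visibility
// was already up-to-date or the PR has no builds. The builds dir and PR number are made up.
import {BuildCreator} from './build-creator';

const buildCreator = new BuildCreator('/var/www/aio-builds');  // assumed builds dir

buildCreator.updatePrVisibility('12345', true).then(changed => {
  console.log(changed ? 'PR 12345 is now public.' : 'Nothing to do for PR 12345.');
});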
@@ -78,4 +115,26 @@ export class BuildCreator extends EventEmitter {
       });
     });
   }
+
+  protected getCandidatePrDirs(pr: string, isPublic: boolean) {
+    const hiddenPrDir = path.join(this.buildsDir, HIDDEN_DIR_PREFIX + pr);
+    const publicPrDir = path.join(this.buildsDir, pr);
+
+    const oldPrDir = isPublic ? hiddenPrDir : publicPrDir;
+    const newPrDir = isPublic ? publicPrDir : hiddenPrDir;
+
+    return {oldPrDir, newPrDir};
+  }
+
+  protected listShasByDate(inputDir: string): Promise<string[]> {
+    return Promise.resolve().
+      then(() => shell.ls('-l', inputDir) as any as Promise<(fs.Stats & {name: string})[]>).
+      // Keep directories only.
+      // (Also, convert to standard Array - ShellJS provides custom `sort()` method for sorting file contents.)
+      then(items => items.filter(item => item.isDirectory())).
+      // Sort by modification date.
+      then(items => items.sort((a, b) => a.mtime.getTime() - b.mtime.getTime())).
+      // Return directory names.
+      then(items => items.map(item => item.name));
+  }
 }
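// Illustrative sketch (not part of the commit): what `getCandidatePrDirs()` returns for each
// visibility, assuming a hypothetical builds dir of '/var/www/aio-builds'. (The method is
// `protected`, so outside the class it is only reachable from a subclass or a test double.)
//
//   getCandidatePrDirs('12345', true)  => {oldPrDir: '/var/www/aio-builds/hidden--12345',
//                                          newPrDir: '/var/www/aio-builds/12345'}
//   getCandidatePrDirs('12345', false) => {oldPrDir: '/var/www/aio-builds/12345',
//                                          newPrDir: '/var/www/aio-builds/hidden--12345'}
//
// In `create()` the `newPrDir` for the requested visibility becomes the PR directory, and in
// `updatePrVisibility()` the build is moved from `oldPrDir` to `newPrDir` when needed.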
@@ -1,15 +1,16 @@
 // Classes
-export class BuildEvent {
+export class ChangedPrVisibilityEvent {
+  // Properties - Public, Static
+  public static type = 'pr.changedVisibility';
+
   // Constructor
-  constructor(public type: string, public pr: number, public sha: string) {}
+  constructor(public pr: number, public shas: string[], public isPublic: boolean) {}
 }
 
-export class CreatedBuildEvent extends BuildEvent {
+export class CreatedBuildEvent {
   // Properties - Public, Static
   public static type = 'build.created';
 
   // Constructor
-  constructor(pr: number, sha: string) {
-    super(CreatedBuildEvent.type, pr, sha);
-  }
+  constructor(public pr: number, public sha: string, public isPublic: boolean) {}
 }
@@ -1,6 +1,6 @@
 // Imports
 import * as jwt from 'jsonwebtoken';
-import {GithubPullRequests} from '../common/github-pull-requests';
+import {GithubPullRequests, PullRequest} from '../common/github-pull-requests';
 import {GithubTeams} from '../common/github-teams';
 import {assertNotMissingOrEmpty} from '../common/utils';
 import {UploadError} from './upload-error';
@@ -11,6 +11,12 @@ interface JwtPayload {
   'pull-request': number;
 }
 
+// Enums
+export enum BUILD_VERIFICATION_STATUS {
+  verifiedAndTrusted,
+  verifiedNotTrusted,
+}
+
 // Classes
 export class BuildVerifier {
   // Properties - Protected
@@ -19,27 +25,27 @@ export class BuildVerifier {
 
   // Constructor
   constructor(protected secret: string, githubToken: string, protected repoSlug: string, organization: string,
-              protected allowedTeamSlugs: string[]) {
+              protected allowedTeamSlugs: string[], protected trustedPrLabel: string) {
     assertNotMissingOrEmpty('secret', secret);
     assertNotMissingOrEmpty('githubToken', githubToken);
     assertNotMissingOrEmpty('repoSlug', repoSlug);
     assertNotMissingOrEmpty('organization', organization);
     assertNotMissingOrEmpty('allowedTeamSlugs', allowedTeamSlugs && allowedTeamSlugs.join(''));
+    assertNotMissingOrEmpty('trustedPrLabel', trustedPrLabel);
 
     this.githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
     this.githubTeams = new GithubTeams(githubToken, organization);
   }
 
   // Methods - Public
-  public getPrAuthorTeamMembership(pr: number): Promise<{author: string, isMember: boolean}> {
+  public getPrIsTrusted(pr: number): Promise<boolean> {
     return Promise.resolve().
       then(() => this.githubPullRequests.fetch(pr)).
-      then(prInfo => prInfo.user.login).
-      then(author => this.githubTeams.isMemberBySlug(author, this.allowedTeamSlugs).
-        then(isMember => ({author, isMember})));
+      then(prInfo => this.hasLabel(prInfo, this.trustedPrLabel) ||
+                     this.githubTeams.isMemberBySlug(prInfo.user.login, this.allowedTeamSlugs));
   }
 
-  public verify(expectedPr: number, authHeader: string): Promise<void> {
+  public verify(expectedPr: number, authHeader: string): Promise<BUILD_VERIFICATION_STATUS> {
     return Promise.resolve().
       then(() => this.extractJwtString(authHeader)).
       then(jwtString => this.verifyJwt(expectedPr, jwtString)).
@@ -52,9 +58,13 @@ export class BuildVerifier {
     return input.replace(/^token +/i, '');
   }
 
+  protected hasLabel(prInfo: PullRequest, label: string) {
+    return prInfo.labels.some(labelObj => labelObj.name === label);
+  }
+
   protected verifyJwt(expectedPr: number, token: string): Promise<JwtPayload> {
     return new Promise((resolve, reject) => {
-      jwt.verify(token, this.secret, {issuer: 'Travis CI, GmbH'}, (err, payload) => {
+      jwt.verify(token, this.secret, {issuer: 'Travis CI, GmbH'}, (err, payload: JwtPayload) => {
         if (err) {
           reject(err.message || err);
         } else if (payload.slug !== this.repoSlug) {
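// Illustrative sketch (not part of the commit): the shape of the JWT payload that `verifyJwt()`
// checks above. Only the issuer ('Travis CI, GmbH'), `slug` and `pull-request` claims are taken
// from this diff; the secret and concrete values below are made up.
import * as jwt from 'jsonwebtoken';

const examplePayload = {
  iss: 'Travis CI, GmbH',
  slug: 'angular/angular',   // must equal the configured repoSlug
  'pull-request': 12345,     // must equal the PR the upload claims to be for
};

// Signing/verifying with a shared secret, analogous to what BuildVerifier does:
const token = jwt.sign(examplePayload, '<preview-deployment-token>');
jwt.verify(token, '<preview-deployment-token>', {issuer: 'Travis CI, GmbH'}, (err, payload) => {
  if (!err) { console.log('Verified payload:', payload); }
});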
@@ -68,11 +78,10 @@ export class BuildVerifier {
     });
   }
 
-  protected verifyPr(pr: number): Promise<void> {
-    return this.getPrAuthorTeamMembership(pr).
-      then(({author, isMember}) => isMember ? Promise.resolve() : Promise.reject(
-        `User '${author}' is not an active member of any of the following teams: ` +
-        `${this.allowedTeamSlugs.join(', ')}`,
-      ));
+  protected verifyPr(pr: number): Promise<BUILD_VERIFICATION_STATUS> {
+    return this.getPrIsTrusted(pr).
+      then(isTrusted => Promise.resolve(isTrusted ?
+        BUILD_VERIFICATION_STATUS.verifiedAndTrusted :
+        BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
   }
 }
@@ -12,28 +12,28 @@ function _main() {
   const repoSlug = getEnvVar('AIO_REPO_SLUG');
   const organization = getEnvVar('AIO_GITHUB_ORGANIZATION');
   const allowedTeamSlugs = getEnvVar('AIO_GITHUB_TEAM_SLUGS').split(',');
+  const trustedPrLabel = getEnvVar('AIO_TRUSTED_PR_LABEL');
   const pr = +getEnvVar('AIO_PREVERIFY_PR');
 
-  const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, organization, allowedTeamSlugs);
+  const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, organization, allowedTeamSlugs,
+                                          trustedPrLabel);
 
   // Exit codes:
-  // - 0: The PR author is a member.
-  // - 1: The PR author is not a member.
-  // - 2: An error occurred.
-  buildVerifier.getPrAuthorTeamMembership(pr).
-    then(({author, isMember}) => {
-      if (isMember) {
-        process.exit(0);
-      } else {
-        const errorMessage = `User '${author}' is not an active member of any of the following teams: ` +
-                             `${allowedTeamSlugs.join(', ')}`;
-        onError(errorMessage, 1);
-      }
-    }).
-    catch(err => onError(err, 2));
-}
-
-function onError(err: string, exitCode: number) {
-  console.error(err);
-  process.exit(exitCode || 1);
+  // - 0: The PR can be automatically trusted (i.e. author belongs to trusted team or PR has the "trusted PR" label).
+  // - 1: An error occurred.
+  // - 2: The PR cannot be automatically trusted.
+  buildVerifier.getPrIsTrusted(pr).
+    then(isTrusted => {
+      if (!isTrusted) {
+        console.warn(
+          `The PR cannot be automatically verified, because it doesn't have the "${trustedPrLabel}" label and ` +
+          `the author is not an active member of any of the following teams: ${allowedTeamSlugs.join(', ')}`);
+      }
+
+      process.exit(isTrusted ? 0 : 2);
+    }).
+    catch(err => {
+      console.error(err);
+      process.exit(1);
+    });
 }
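// Illustrative sketch (not part of the commit): mapping the exit codes documented above to a
// deployment decision. The script path is a placeholder; only the exit-code meanings come from
// this diff.
import {execFile} from 'child_process';

execFile('node', ['<path-to-preverify-script>'], (err: any) => {
  const exitCode = err ? err.code : 0;
  if (exitCode === 0) {
    console.log('PR can be trusted: deploy a public preview.');
  } else if (exitCode === 2) {
    console.log('PR cannot be automatically trusted: deploy a hidden preview (or skip).');
  } else {
    console.error('Verification failed with an error.');
  }
});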
@@ -1,10 +0,0 @@
-// Imports
-import {GithubPullRequests} from '../common/github-pull-requests';
-import {BuildVerifier} from './build-verifier';
-
-// Run
-// TODO(gkalpak): Add e2e tests to cover these interactions as well.
-GithubPullRequests.prototype.addComment = () => Promise.resolve();
-BuildVerifier.prototype.verify = () => Promise.resolve();
-// tslint:disable-next-line: no-var-requires
-require('./index');
@@ -1,6 +1,3 @@
-// TODO(gkalpak): Find more suitable way to run as `www-data`.
-process.setuid('www-data');
-
 // Imports
 import {getEnvVar} from '../common/utils';
 import {uploadServerFactory} from './upload-server-factory';
@@ -13,10 +10,13 @@ const AIO_GITHUB_TEAM_SLUGS = getEnvVar('AIO_GITHUB_TEAM_SLUGS');
 const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN');
 const AIO_PREVIEW_DEPLOYMENT_TOKEN = getEnvVar('AIO_PREVIEW_DEPLOYMENT_TOKEN');
 const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
+const AIO_TRUSTED_PR_LABEL = getEnvVar('AIO_TRUSTED_PR_LABEL');
 const AIO_UPLOAD_HOSTNAME = getEnvVar('AIO_UPLOAD_HOSTNAME');
 const AIO_UPLOAD_PORT = +getEnvVar('AIO_UPLOAD_PORT');
+const AIO_WWW_USER = getEnvVar('AIO_WWW_USER');
 
 // Run
+process.setuid(AIO_WWW_USER); // TODO(gkalpak): Find more suitable way to run as `www-data`.
 _main();
 
 // Functions
@@ -30,6 +30,7 @@ function _main() {
     githubToken: AIO_GITHUB_TOKEN,
     repoSlug: AIO_REPO_SLUG,
     secret: AIO_PREVIEW_DEPLOYMENT_TOKEN,
+    trustedPrLabel: AIO_TRUSTED_PR_LABEL,
   }).
     listen(AIO_UPLOAD_PORT, AIO_UPLOAD_HOSTNAME);
 }
@@ -1,11 +1,12 @@
 // Imports
+import * as bodyParser from 'body-parser';
 import * as express from 'express';
 import * as http from 'http';
 import {GithubPullRequests} from '../common/github-pull-requests';
 import {assertNotMissingOrEmpty} from '../common/utils';
 import {BuildCreator} from './build-creator';
-import {CreatedBuildEvent} from './build-events';
-import {BuildVerifier} from './build-verifier';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from './build-events';
+import {BUILD_VERIFICATION_STATUS, BuildVerifier} from './build-verifier';
 import {UploadError} from './upload-error';
 
 // Constants
@@ -21,6 +22,7 @@ interface UploadServerConfig {
   githubToken: string;
   repoSlug: string;
   secret: string;
+  trustedPrLabel: string;
 }
 
 // Classes
@@ -34,14 +36,16 @@ class UploadServerFactory {
     githubToken,
     repoSlug,
     secret,
+    trustedPrLabel,
   }: UploadServerConfig): http.Server {
     assertNotMissingOrEmpty('domainName', domainName);
 
-    const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, githubOrganization, githubTeamSlugs);
+    const buildVerifier = new BuildVerifier(secret, githubToken, repoSlug, githubOrganization, githubTeamSlugs,
+                                            trustedPrLabel);
     const buildCreator = this.createBuildCreator(buildsDir, githubToken, repoSlug, domainName);
 
     const middleware = this.createMiddleware(buildVerifier, buildCreator);
-    const httpServer = http.createServer(middleware);
+    const httpServer = http.createServer(middleware as any);
 
     httpServer.on('listening', () => {
       const info = httpServer.address();
|
|||||||
domainName: string): BuildCreator {
|
domainName: string): BuildCreator {
|
||||||
const buildCreator = new BuildCreator(buildsDir);
|
const buildCreator = new BuildCreator(buildsDir);
|
||||||
const githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
const githubPullRequests = new GithubPullRequests(githubToken, repoSlug);
|
||||||
|
const postPreviewsComment = (pr: number, shas: string[]) => {
|
||||||
|
const body = shas.
|
||||||
|
map(sha => `You can preview ${sha} at https://pr${pr}-${sha}.${domainName}/.`).
|
||||||
|
join('\n');
|
||||||
|
|
||||||
buildCreator.on(CreatedBuildEvent.type, ({pr, sha}: CreatedBuildEvent) => {
|
return githubPullRequests.addComment(pr, body);
|
||||||
const body = `The angular.io preview for ${sha.slice(0, 7)} is available [here][1].\n\n` +
|
};
|
||||||
`[1]: https://pr${pr}-${sha}.${domainName}/`;
|
|
||||||
|
|
||||||
githubPullRequests.addComment(pr, body);
|
buildCreator.on(CreatedBuildEvent.type, ({pr, sha, isPublic}: CreatedBuildEvent) => {
|
||||||
|
if (isPublic) {
|
||||||
|
postPreviewsComment(pr, [sha]);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
buildCreator.on(ChangedPrVisibilityEvent.type, ({pr, shas, isPublic}: ChangedPrVisibilityEvent) => {
|
||||||
|
if (isPublic && shas.length) {
|
||||||
|
postPreviewsComment(pr, shas);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return buildCreator;
|
return buildCreator;
|
||||||
@@ -69,6 +85,7 @@ class UploadServerFactory {
 
   protected createMiddleware(buildVerifier: BuildVerifier, buildCreator: BuildCreator): express.Express {
     const middleware = express();
+    const jsonParser = bodyParser.json();
 
     middleware.get(/^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/, (req, res) => {
       const pr = req.params[0];
@@ -80,17 +97,33 @@ class UploadServerFactory {
         this.throwRequestError(401, `Missing or empty '${AUTHORIZATION_HEADER}' header`, req);
       } else if (!archive) {
         this.throwRequestError(400, `Missing or empty '${X_FILE_HEADER}' header`, req);
+      } else {
+        Promise.resolve().
+          then(() => buildVerifier.verify(+pr, authHeader)).
+          then(verStatus => verStatus === BUILD_VERIFICATION_STATUS.verifiedAndTrusted).
+          then(isPublic => buildCreator.create(pr, sha, archive, isPublic).
+            then(() => res.sendStatus(isPublic ? 201 : 202))).
+          catch(err => this.respondWithError(res, err));
       }
-
-      buildVerifier.
-        verify(+pr, authHeader).
-        then(() => buildCreator.create(pr, sha, archive)).
-        then(() => res.sendStatus(201)).
-        catch(err => this.respondWithError(res, err));
     });
     middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
-    middleware.get('*', req => this.throwRequestError(404, 'Unknown resource', req));
-    middleware.all('*', req => this.throwRequestError(405, 'Unsupported method', req));
+    middleware.post(/^\/pr-updated\/?$/, jsonParser, (req, res) => {
+      const {action, number: prNo}: {action?: string, number?: number} = req.body;
+      const visMayHaveChanged = !action || (action === 'labeled') || (action === 'unlabeled');
+
+      if (!visMayHaveChanged) {
+        res.sendStatus(200);
+      } else if (!prNo) {
+        this.throwRequestError(400, `Missing or empty 'number' field`, req);
+      } else {
+        Promise.resolve().
+          then(() => buildVerifier.getPrIsTrusted(prNo)).
+          then(isPublic => buildCreator.updatePrVisibility(String(prNo), isPublic)).
+          then(() => res.sendStatus(200)).
+          catch(err => this.respondWithError(res, err));
+      }
+    });
+    middleware.all('*', req => this.throwRequestError(404, 'Unknown resource', req));
     middleware.use((err: any, _req: any, res: express.Response, _next: any) => this.respondWithError(res, err));
 
     return middleware;
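// Illustrative sketch (not part of the commit): the minimal body the `/pr-updated` handler above
// expects. `number` is the PR number and `action` may be e.g. 'labeled' or 'unlabeled'; any other
// action is acknowledged with 200 and ignored. Host and PR number below are hypothetical.
const examplePrUpdatedBody = {
  action: 'labeled',
  number: 12345,
};

// e.g. (hypothetical host):
//   curl -X POST --header "Content-Type: application/json" \
//        --data '{"action":"labeled","number":12345}' https://<upload-host>/pr-updated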
@@ -109,7 +142,10 @@ class UploadServerFactory {
   }
 
   protected throwRequestError(status: number, error: string, req: express.Request) {
-    throw new UploadError(status, `${error} in request: ${req.method} ${req.originalUrl}`);
+    const message = `${error} in request: ${req.method} ${req.originalUrl}` +
+                    (!req.body ? '' : ` ${JSON.stringify(req.body)}`);
+
+    throw new UploadError(status, message);
   }
 }
@@ -0,0 +1,16 @@
+// Using the values below, we can fake the response of the corresponding methods in tests. This is
+// necessary, because the test upload-server will be running as a separate node process, so we will
+// not have direct access to the code (e.g. for mocking).
+// (See also 'lib/verify-setup/start-test-upload-server.ts'.)
+
+/* tslint:disable: variable-name */
+
+// Special values to be used as `authHeader` in `BuildVerifier#verify()`.
+export const BV_verify_error = 'FAKE_VERIFICATION_ERROR';
+export const BV_verify_verifiedNotTrusted = 'FAKE_VERIFIED_NOT_TRUSTED';
+
+// Special values to be used as `pr` in `BuildVerifier#getPrIsTrusted()`.
+export const BV_getPrIsTrusted_error = 32203;
+export const BV_getPrIsTrusted_notTrusted = 72457;
+
+/* tslint:enable: variable-name */
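// Illustrative sketch (not part of the commit): how a test-only bootstrap such as the
// 'lib/verify-setup/start-test-upload-server.ts' referenced above could key off these values.
// The actual mocking code is not shown in this diff, and the import path for BuildVerifier is an
// assumption, so treat this purely as an example of the intended mechanism.
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../upload-server/build-verifier';
import * as c from './constants';

BuildVerifier.prototype.verify = (expectedPr: number, authHeader: string) =>
  (authHeader === c.BV_verify_error) ?
    Promise.reject(`Error while verifying upload for PR ${expectedPr}: Test`) :
  (authHeader === c.BV_verify_verifiedNotTrusted) ?
    Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted) :
    Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);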
@@ -4,10 +4,10 @@ import * as fs from 'fs';
 import * as http from 'http';
 import * as path from 'path';
 import * as shell from 'shelljs';
+import {HIDDEN_DIR_PREFIX, SHORT_SHA_LEN} from '../common/constants';
 import {getEnvVar} from '../common/utils';
 
 // Constans
-const SERVER_USER = 'www-data';
 const TEST_AIO_BUILDS_DIR = getEnvVar('TEST_AIO_BUILDS_DIR');
 const TEST_AIO_NGINX_HOSTNAME = getEnvVar('TEST_AIO_NGINX_HOSTNAME');
 const TEST_AIO_NGINX_PORT_HTTP = +getEnvVar('TEST_AIO_NGINX_PORT_HTTP');
|
|||||||
const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
|
const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
|
||||||
const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
|
const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
|
||||||
const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
|
const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
|
||||||
|
const WWW_USER = getEnvVar('AIO_WWW_USER');
|
||||||
|
|
||||||
// Interfaces - Types
|
// Interfaces - Types
|
||||||
export interface CmdResult { success: boolean; err: Error; stdout: string; stderr: string; }
|
export interface CmdResult { success: boolean; err: Error; stdout: string; stderr: string; }
|
||||||
@@ -31,10 +32,10 @@ class Helper {
   public get nginxHostname() { return TEST_AIO_NGINX_HOSTNAME; }
   public get nginxPortHttp() { return TEST_AIO_NGINX_PORT_HTTP; }
   public get nginxPortHttps() { return TEST_AIO_NGINX_PORT_HTTPS; }
-  public get serverUser() { return SERVER_USER; }
   public get uploadHostname() { return TEST_AIO_UPLOAD_HOSTNAME; }
   public get uploadPort() { return TEST_AIO_UPLOAD_PORT; }
   public get uploadMaxSize() { return TEST_AIO_UPLOAD_MAX_SIZE; }
+  public get wwwUser() { return WWW_USER; }
 
   // Properties - Protected
   protected cleanUpFns: CleanUpFn[] = [];
@@ -46,10 +47,16 @@ class Helper {
   // Constructor
   constructor() {
     shell.mkdir('-p', this.buildsDir);
-    shell.exec(`chown -R ${this.serverUser} ${this.buildsDir}`);
+    shell.exec(`chown -R ${this.wwwUser} ${this.buildsDir}`);
   }
 
   // Methods - Public
+  public buildExists(pr: string, sha = '', isPublic = true, legacy = false): boolean {
+    const prDir = this.getPrDir(pr, isPublic);
+    const dir = !sha ? prDir : this.getShaDir(prDir, sha, legacy);
+    return fs.existsSync(dir);
+  }
+
   public cleanUp() {
     while (this.cleanUpFns.length) {
       // Clean-up fns remove themselves from the list.
|
|||||||
}
|
}
|
||||||
|
|
||||||
public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
|
public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
|
||||||
const inputDir = path.join(this.buildsDir, 'uploaded', pr, sha);
|
const inputDir = this.getShaDir(this.getPrDir(`uploaded/${pr}`, true), sha);
|
||||||
const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
|
const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
|
||||||
const cmd2 = `chown ${this.serverUser} ${archivePath}`;
|
const cmd2 = `chown ${this.wwwUser} ${archivePath}`;
|
||||||
|
|
||||||
const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true);
|
const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true, true);
|
||||||
shell.exec(cmd1);
|
shell.exec(cmd1);
|
||||||
shell.exec(cmd2);
|
shell.exec(cmd2);
|
||||||
cleanUpTemp();
|
cleanUpTemp();
|
||||||
@ -74,21 +81,21 @@ class Helper {
|
|||||||
return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
|
return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
|
||||||
}
|
}
|
||||||
|
|
||||||
public createDummyBuild(pr: string, sha: string, force = false): CleanUpFn {
|
public createDummyBuild(pr: string, sha: string, isPublic = true, force = false, legacy = false): CleanUpFn {
|
||||||
const prDir = path.join(this.buildsDir, pr);
|
const prDir = this.getPrDir(pr, isPublic);
|
||||||
const shaDir = path.join(prDir, sha);
|
const shaDir = this.getShaDir(prDir, sha, legacy);
|
||||||
const idxPath = path.join(shaDir, 'index.html');
|
const idxPath = path.join(shaDir, 'index.html');
|
||||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||||
|
|
||||||
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
|
||||||
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
|
||||||
shell.exec(`chown -R ${this.serverUser} ${prDir}`);
|
shell.exec(`chown -R ${this.wwwUser} ${prDir}`);
|
||||||
|
|
||||||
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
|
||||||
}
|
}
|
||||||
|
|
||||||
public deletePrDir(pr: string) {
|
public deletePrDir(pr: string, isPublic = true) {
|
||||||
const prDir = path.join(this.buildsDir, pr);
|
const prDir = this.getPrDir(pr, isPublic);
|
||||||
|
|
||||||
if (fs.existsSync(prDir)) {
|
if (fs.existsSync(prDir)) {
|
||||||
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
|
||||||
@@ -97,8 +104,22 @@ class Helper {
     }
   }
 
-  public readBuildFile(pr: string, sha: string, relFilePath: string): string {
-    const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
+  public getPrDir(pr: string, isPublic: boolean): string {
+    const prDirName = isPublic ? pr : HIDDEN_DIR_PREFIX + pr;
+    return path.join(this.buildsDir, prDirName);
+  }
+
+  public getShaDir(prDir: string, sha: string, legacy = false): string {
+    return path.join(prDir, legacy ? sha : this.getShordSha(sha));
+  }
+
+  public getShordSha(sha: string): string {
+    return sha.substr(0, SHORT_SHA_LEN);
+  }
+
+  public readBuildFile(pr: string, sha: string, relFilePath: string, isPublic = true, legacy = false): string {
+    const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
+    const absFilePath = path.join(shaDir, relFilePath);
     return fs.readFileSync(absFilePath, 'utf8');
   }
 
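// Illustrative sketch (not part of the commit): the paths produced by the helpers above for a
// hypothetical builds dir, PR '9' and a 40-character SHA of all '9's (SHORT_SHA_LEN is 7).
//
//   const shaExample = '9'.repeat(40);
//   h.getPrDir('9', true)                                   => '<buildsDir>/9'
//   h.getPrDir('9', false)                                  => '<buildsDir>/hidden--9'
//   h.getShaDir(h.getPrDir('9', true), shaExample)          => '<buildsDir>/9/9999999'        (short SHA)
//   h.getShaDir(h.getPrDir('9', true), shaExample, true)    => '<buildsDir>/9/' + shaExample  (legacy: full SHA)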
@@ -129,7 +150,8 @@ class Helper {
     const [headers, body] = result.stdout.
       split(/(?:\r?\n){2,}/).
       map(s => s.trim()).
-      slice(-2);
+      slice(-2);    // In case of redirect, discard the previous headers.
+                    // Only keep the last to sections (final headers and body).
 
     if (!result.success) {
       console.log('Stdout:', result.stdout);
@@ -143,8 +165,10 @@ class Helper {
     };
   }
 
-  public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string): CleanUpFn {
-    const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
+  public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string, isPublic = true,
+                        legacy = false): CleanUpFn {
+    const shaDir = this.getShaDir(this.getPrDir(pr, isPublic), sha, legacy);
+    const absFilePath = path.join(shaDir, relFilePath);
     return this.writeFile(absFilePath, {content}, true);
   }
 
@@ -166,7 +190,7 @@ class Helper {
       // Create a file with the specified content.
       fs.writeFileSync(filePath, content || '');
     }
-    shell.exec(`chown ${this.serverUser} ${filePath}`);
+    shell.exec(`chown ${this.wwwUser} ${filePath}`);
 
     return this.createCleanUpFn(() => shell.rm('-rf', cleanUpTarget));
   }
@@ -31,111 +31,184 @@ describe(`nginx`, () => {
  });


  h.runForAllSupportedSchemes((scheme, port) => describe(`(on ${scheme.toUpperCase()})`, () => {
    const hostname = h.nginxHostname;
    const host = `${hostname}:${port}`;
    const pr = '9';
    const sha9 = '9'.repeat(40);
    const sha0 = '0'.repeat(40);
    const shortSha9 = h.getShordSha(sha9);
    const shortSha0 = h.getShordSha(sha0);


    describe(`pr<pr>-<sha>.${host}/*`, () => {

      describe('(for public builds)', () => {

        beforeEach(() => {
          h.createDummyBuild(pr, sha9);
          h.createDummyBuild(pr, sha0);
        });


        it('should return /index.html', done => {
          const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
          const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);

          Promise.all([
            h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
            h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
            h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
          ]).then(done);
        });


        it('should return /index.html (for legacy builds)', done => {
          const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
          const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);

          h.createDummyBuild(pr, sha9, true, false, true);

          Promise.all([
            h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
            h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
            h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
          ]).then(done);
        });


        it('should return /foo/bar.js', done => {
          const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);

          h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/bar.js`).
            then(h.verifyResponse(200, bodyRegex)).
            then(done);
        });


        it('should return /foo/bar.js (for legacy builds)', done => {
          const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
          const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /foo/bar\\.js$`);

          h.createDummyBuild(pr, sha9, true, false, true);

          h.runCmd(`curl -iL ${origin}/foo/bar.js`).
            then(h.verifyResponse(200, bodyRegex)).
            then(done);
        });


        it('should respond with 403 for directories', done => {
          Promise.all([
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/`).then(h.verifyResponse(403)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo`).then(h.verifyResponse(403)),
          ]).then(done);
        });


        it('should respond with 404 for unknown paths to files', done => {
          h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}/foo/baz.css`).
            then(h.verifyResponse(404)).
            then(done);
        });


        it('should rewrite to \'index.html\' for unknown paths that don\'t look like files', done => {
          const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
          const bodyRegex = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);

          Promise.all([
            h.runCmd(`curl -iL ${origin}/foo/baz`).then(h.verifyResponse(200, bodyRegex)),
            h.runCmd(`curl -iL ${origin}/foo/baz/`).then(h.verifyResponse(200, bodyRegex)),
          ]).then(done);
        });


        it('should respond with 404 for unknown PRs/SHAs', done => {
          const otherPr = 54321;
          const otherShortSha = h.getShordSha('8'.repeat(40));

          Promise.all([
            h.runCmd(`curl -iL ${scheme}://pr${pr}9-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}9.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherShortSha}.${host}`).then(h.verifyResponse(404)),
          ]).then(done);
        });


        it('should respond with 404 if the subdomain format is wrong', done => {
          Promise.all([
            h.runCmd(`curl -iL ${scheme}://xpr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://prx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://xx${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://p${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://r${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://${pr}-${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}_${shortSha9}.${host}`).then(h.verifyResponse(404)),
          ]).then(done);
        });


        it('should reject PRs with leading zeros', done => {
          h.runCmd(`curl -iL ${scheme}://pr0${pr}-${shortSha9}.${host}`).
            then(h.verifyResponse(404)).
            then(done);
        });


        it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
          const bodyRegex9 = new RegExp(`^PR: ${pr} | SHA: ${sha9} | File: /index\\.html$`);
          const bodyRegex0 = new RegExp(`^PR: ${pr} | SHA: ${sha0} | File: /index\\.html$`);

          Promise.all([
            h.runCmd(`curl -iL ${scheme}://pr${pr}-0${shortSha9}.${host}`).then(h.verifyResponse(404)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha9}.${host}`).then(h.verifyResponse(200, bodyRegex9)),
            h.runCmd(`curl -iL ${scheme}://pr${pr}-${shortSha0}.${host}`).then(h.verifyResponse(200, bodyRegex0)),
          ]).then(done);
        });

      });


      describe('(for hidden builds)', () => {

        it('should respond with 404 for any file or directory', done => {
          const origin = `${scheme}://pr${pr}-${shortSha9}.${host}`;
          const assert404 = h.verifyResponse(404);

          h.createDummyBuild(pr, sha9, false);
          expect(h.buildExists(pr, sha9, false)).toBe(true);

          Promise.all([
            h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
            h.runCmd(`curl -iL ${origin}/`).then(assert404),
            h.runCmd(`curl -iL ${origin}`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
          ]).then(done);
        });


        it('should respond with 404 for any file or directory (for legacy builds)', done => {
          const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
          const assert404 = h.verifyResponse(404);

          h.createDummyBuild(pr, sha9, false, false, true);
          expect(h.buildExists(pr, sha9, false, true)).toBe(true);

          Promise.all([
            h.runCmd(`curl -iL ${origin}/index.html`).then(assert404),
            h.runCmd(`curl -iL ${origin}/`).then(assert404),
            h.runCmd(`curl -iL ${origin}`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo/bar.js`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo/`).then(assert404),
            h.runCmd(`curl -iL ${origin}/foo`).then(assert404),
          ]).then(done);
        });

      });

    });
@@ -244,9 +317,54 @@ describe(`nginx`, () => {
     });
 
 
+    describe(`${host}/pr-updated`, () => {
+      const url = `${scheme}://${host}/pr-updated`;
+
+
+      it('should disallow non-POST requests', done => {
+        Promise.all([
+          h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
+          h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
+          h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
+          h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
+        ]).then(done);
+      });
+
+
+      it('should pass requests through to the upload server', done => {
+        const cmdPrefix = `curl -iLX POST --header "Content-Type: application/json"`;
+
+        const cmd1 = `${cmdPrefix} ${url}`;
+        const cmd2 = `${cmdPrefix} --data '{"number":${pr}}' ${url}`;
+        const cmd3 = `${cmdPrefix} --data '{"number":${pr},"action":"foo"}' ${url}`;
+
+        Promise.all([
+          h.runCmd(cmd1).then(h.verifyResponse(400, /Missing or empty 'number' field/)),
+          h.runCmd(cmd2).then(h.verifyResponse(200)),
+          h.runCmd(cmd3).then(h.verifyResponse(200)),
+        ]).then(done);
+      });
+
+
+      it('should respond with 404 for unknown paths', done => {
+        const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
+
+        Promise.all([
+          h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
+          h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
+          h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
+          h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
+          h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
+          h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
+        ]).then(done);
+      });
+
+    });
+
+
     describe(`${host}/*`, () => {
 
-      it('should respond with 404 for unkown URLs (even if the resource exists)', done => {
+      it('should respond with 404 for unknown URLs (even if the resource exists)', done => {
         ['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
           const absFilePath = path.join(h.buildsDir, relFilePath);
           h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
|
|||||||
// Imports
|
// Imports
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
import * as c from './constants';
|
||||||
import {helper as h} from './helper';
|
import {helper as h} from './helper';
|
||||||
|
|
||||||
// Tests
|
// Tests
|
||||||
@ -12,73 +13,307 @@ h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme
|
|||||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||||
|
|
||||||
const getFile = (pr: string, sha: string, file: string) =>
|
const getFile = (pr: string, sha: string, file: string) =>
|
||||||
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha}.${host}/${file}`);
|
h.runCmd(`curl -iL ${scheme}://pr${pr}-${h.getShordSha(sha)}.${host}/${file}`);
|
||||||
const uploadBuild = (pr: string, sha: string, archive: string) => {
|
const uploadBuild = (pr: string, sha: string, archive: string, authHeader = 'Token FOO') => {
|
||||||
const curlPost = 'curl -iLX POST --header "Authorization: Token FOO"';
|
const curlPost = `curl -iLX POST --header "Authorization: ${authHeader}"`;
|
||||||
return h.runCmd(`${curlPost} --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
|
return h.runCmd(`${curlPost} --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
|
||||||
};
|
};
|
||||||
|
const prUpdated = (pr: number, action?: string) => {
|
||||||
|
const url = `${scheme}://${host}/pr-updated`;
|
||||||
|
const payloadStr = JSON.stringify({number: pr, action});
|
||||||
|
return h.runCmd(`curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`);
|
||||||
|
};
|
||||||
|
|
||||||
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
h.deletePrDir(pr9);
|
h.deletePrDir(pr9);
|
||||||
|
h.deletePrDir(pr9, false);
|
||||||
h.cleanUp();
|
h.cleanUp();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should be able to upload and serve a build for a new PR', done => {
|
describe('for a new/non-existing PR', () => {
|
||||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
|
||||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
|
||||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
|
||||||
|
|
||||||
h.createDummyArchive(pr9, sha9, archivePath);
|
it('should be able to upload and serve a public build', done => {
|
||||||
|
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath).
|
||||||
|
then(() => Promise.all([
|
||||||
|
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||||
|
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||||
|
])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should be able to upload but not serve a hidden build', done => {
|
||||||
|
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||||
|
then(() => Promise.all([
|
||||||
|
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
|
||||||
|
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||||
|
])).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||||
|
expect(h.buildExists(pr9, sha9, false)).toBe(true);
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject an upload if verification fails', done => {
|
||||||
|
const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
|
||||||
|
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
|
||||||
|
then(h.verifyResponse(403, errorRegex9)).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr9)).toBe(false);
|
||||||
|
expect(h.buildExists(pr9, '', false)).toBe(false);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should be able to notify that a PR has been updated (and do nothing)', done => {
|
||||||
|
prUpdated(+pr9).
|
||||||
|
then(h.verifyResponse(200)).
|
||||||
|
then(() => {
|
||||||
|
// The PR should still not exist.
|
||||||
|
expect(h.buildExists(pr9, '', false)).toBe(false);
|
||||||
|
expect(h.buildExists(pr9, '', true)).toBe(false);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
uploadBuild(pr9, sha9, archivePath).
|
|
||||||
then(() => Promise.all([
|
|
||||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
|
||||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
|
||||||
])).
|
|
||||||
then(done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should be able to upload and serve a build for an existing PR', done => {
|
describe('for an existing PR', () => {
|
||||||
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
|
||||||
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
|
||||||
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
|
||||||
|
|
||||||
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
it('should be able to upload and serve a public build', done => {
|
||||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
||||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
||||||
|
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
h.createDummyBuild(pr9, sha0);
|
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
h.createDummyArchive(pr9, sha9, archivePath);
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
uploadBuild(pr9, sha9, archivePath).
|
h.createDummyBuild(pr9, sha0);
|
||||||
then(() => Promise.all([
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
|
|
||||||
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
|
uploadBuild(pr9, sha9, archivePath).
|
||||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
then(() => Promise.all([
|
||||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
|
||||||
])).
|
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
|
||||||
then(done);
|
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||||
});
|
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||||
|
])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should not be able to overwrite a build', done => {
|
it('should be able to upload but not serve a hidden build', done => {
|
||||||
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
|
||||||
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
|
||||||
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
h.createDummyBuild(pr9, sha9);
|
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
h.createDummyArchive(pr9, sha9, archivePath);
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
|
h.createDummyBuild(pr9, sha0, false);
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||||
|
then(() => Promise.all([
|
||||||
|
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(404)),
|
||||||
|
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||||
|
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(404)),
|
||||||
|
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(404)),
|
||||||
|
])).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||||
|
expect(h.buildExists(pr9, sha9, false)).toBe(true);
|
||||||
|
expect(h.readBuildFile(pr9, sha0, 'index.html', false)).toMatch(idxContentRegex0);
|
||||||
|
expect(h.readBuildFile(pr9, sha0, 'foo/bar.js', false)).toMatch(barContentRegex0);
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject an upload if verification fails', done => {
|
||||||
|
const errorRegex9 = new RegExp(`Error while verifying upload for PR ${pr9}: Test`);
|
||||||
|
|
||||||
|
h.createDummyBuild(pr9, sha0);
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath, c.BV_verify_error).
|
||||||
|
then(h.verifyResponse(403, errorRegex9)).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr9)).toBe(true);
|
||||||
|
expect(h.buildExists(pr9, sha0)).toBe(true);
|
||||||
|
expect(h.buildExists(pr9, sha9)).toBe(false);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not be able to overwrite an existing public build', done => {
|
||||||
|
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
|
h.createDummyBuild(pr9, sha9);
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath).
|
||||||
|
then(h.verifyResponse(409)).
|
||||||
|
then(() => Promise.all([
|
||||||
|
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
||||||
|
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
||||||
|
])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not be able to overwrite an existing hidden build', done => {
|
||||||
|
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
|
||||||
|
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
|
||||||
|
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
|
||||||
|
|
||||||
|
h.createDummyBuild(pr9, sha9, false);
|
||||||
|
h.createDummyArchive(pr9, sha9, archivePath);
|
||||||
|
|
||||||
|
uploadBuild(pr9, sha9, archivePath, c.BV_verify_verifiedNotTrusted).
|
||||||
|
then(h.verifyResponse(409)).
|
||||||
|
then(() => {
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'index.html', false)).toMatch(idxContentRegex9);
|
||||||
|
expect(h.readBuildFile(pr9, sha9, 'foo/bar.js', false)).toMatch(barContentRegex9);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should be able to request re-checking visibility (if outdated)', done => {
  const publicPr = pr9;
  const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);

  h.createDummyBuild(publicPr, sha9, false);
  h.createDummyBuild(hiddenPr, sha9, true);

  // PR visibilities are outdated (i.e. the opposite of what they should be).
  expect(h.buildExists(publicPr, '', false)).toBe(true);
  expect(h.buildExists(publicPr, '', true)).toBe(false);
  expect(h.buildExists(hiddenPr, '', false)).toBe(false);
  expect(h.buildExists(hiddenPr, '', true)).toBe(true);

  Promise.
    all([
      prUpdated(+publicPr).then(h.verifyResponse(200)),
      prUpdated(+hiddenPr).then(h.verifyResponse(200)),
    ]).
    then(() => {
      // PR visibilities should have been updated.
      expect(h.buildExists(publicPr, '', false)).toBe(false);
      expect(h.buildExists(publicPr, '', true)).toBe(true);
      expect(h.buildExists(hiddenPr, '', false)).toBe(true);
      expect(h.buildExists(hiddenPr, '', true)).toBe(false);
    }).
    then(() => {
      h.deletePrDir(publicPr, true);
      h.deletePrDir(hiddenPr, false);
    }).
    then(done);
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should be able to request re-checking visibility (if up-to-date)', done => {
  const publicPr = pr9;
  const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);

  h.createDummyBuild(publicPr, sha9, true);
  h.createDummyBuild(hiddenPr, sha9, false);

  // PR visibilities are already up-to-date.
  expect(h.buildExists(publicPr, '', false)).toBe(false);
  expect(h.buildExists(publicPr, '', true)).toBe(true);
  expect(h.buildExists(hiddenPr, '', false)).toBe(true);
  expect(h.buildExists(hiddenPr, '', true)).toBe(false);

  Promise.
    all([
      prUpdated(+publicPr).then(h.verifyResponse(200)),
      prUpdated(+hiddenPr).then(h.verifyResponse(200)),
    ]).
    then(() => {
      // PR visibilities are still up-to-date.
      expect(h.buildExists(publicPr, '', false)).toBe(false);
      expect(h.buildExists(publicPr, '', true)).toBe(true);
      expect(h.buildExists(hiddenPr, '', false)).toBe(true);
      expect(h.buildExists(hiddenPr, '', true)).toBe(false);
    }).
    then(done);
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject a request if re-checking visibility fails', done => {
  const errorPr = String(c.BV_getPrIsTrusted_error);

  h.createDummyBuild(errorPr, sha9, true);

  expect(h.buildExists(errorPr, '', false)).toBe(false);
  expect(h.buildExists(errorPr, '', true)).toBe(true);

  prUpdated(+errorPr).
    then(h.verifyResponse(500, /Test/)).
    then(() => {
      // PR visibility should not have been updated.
      expect(h.buildExists(errorPr, '', false)).toBe(false);
      expect(h.buildExists(errorPr, '', true)).toBe(true);
    }).
    then(done);
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject a request if updating visibility fails', done => {
  // One way to cause an error is to have both a public and a hidden directory for the same PR.
  h.createDummyBuild(pr9, sha9, false);
  h.createDummyBuild(pr9, sha9, true);

  const hiddenPrDir = h.getPrDir(pr9, false);
  const publicPrDir = h.getPrDir(pr9, true);
  const bodyRegex = new RegExp(`Request to move '${hiddenPrDir}' to existing directory '${publicPrDir}'`);

  expect(h.buildExists(pr9, '', false)).toBe(true);
  expect(h.buildExists(pr9, '', true)).toBe(true);

  prUpdated(+pr9).
    then(h.verifyResponse(409, bodyRegex)).
    then(() => {
      // PR visibility should not have been updated.
      expect(h.buildExists(pr9, '', false)).toBe(true);
      expect(h.buildExists(pr9, '', true)).toBe(true);
    }).
    then(done);
});
|
||||||
|
|
||||||
uploadBuild(pr9, sha9, archivePath).
|
|
||||||
then(h.verifyResponse(403)).
|
|
||||||
then(() => Promise.all([
|
|
||||||
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
|
|
||||||
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
|
|
||||||
])).
|
|
||||||
then(done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
}));
|
}));
|
||||||
|
@@ -0,0 +1,38 @@
// Imports
import {GithubPullRequests} from '../common/github-pull-requests';
import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../upload-server/build-verifier';
import {UploadError} from '../upload-server/upload-error';
import * as c from './constants';

// Run
// TODO(gkalpak): Add e2e tests to cover these interactions as well.
GithubPullRequests.prototype.addComment = () => Promise.resolve();
BuildVerifier.prototype.getPrIsTrusted = (pr: number) => {
  switch (pr) {
    case c.BV_getPrIsTrusted_error:
      // For e2e tests, fake an error.
      return Promise.reject('Test');
    case c.BV_getPrIsTrusted_notTrusted:
      // For e2e tests, fake an untrusted PR (`false`).
      return Promise.resolve(false);
    default:
      // For e2e tests, default to trusted PRs (`true`).
      return Promise.resolve(true);
  }
};
BuildVerifier.prototype.verify = (expectedPr: number, authHeader: string) => {
  switch (authHeader) {
    case c.BV_verify_error:
      // For e2e tests, fake a verification error.
      return Promise.reject(new UploadError(403, `Error while verifying upload for PR ${expectedPr}: Test`));
    case c.BV_verify_verifiedNotTrusted:
      // For e2e tests, fake a `verifiedNotTrusted` verification status.
      return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted);
    default:
      // For e2e tests, default to `verifiedAndTrusted` verification status.
      return Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);
  }
};

// tslint:disable-next-line: no-var-requires
require('../upload-server/index');
|
@ -1,6 +1,7 @@
|
|||||||
// Imports
|
// Imports
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
import * as c from './constants';
|
||||||
import {CmdResult, helper as h} from './helper';
|
import {CmdResult, helper as h} from './helper';
|
||||||
|
|
||||||
// Tests
|
// Tests
|
||||||
@ -19,18 +20,19 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
describe(`${host}/create-build/<pr>/<sha>`, () => {
|
||||||
const authorizationHeader = `--header "Authorization: Token FOO"`;
|
const authorizationHeader = `--header "Authorization: Token FOO"`;
|
||||||
const xFileHeader = `--header "X-File: ${h.buildsDir}/snapshot.tar.gz"`;
|
const xFileHeader = `--header "X-File: ${h.buildsDir}/snapshot.tar.gz"`;
|
||||||
const curl = `curl -iL ${authorizationHeader} ${xFileHeader}`;
|
const defaultHeaders = `${authorizationHeader} ${xFileHeader}`;
|
||||||
|
const curl = (url: string, headers = defaultHeaders) => `curl -iL ${headers} ${url}`;
|
||||||
|
|
||||||
|
|
||||||
it('should disallow non-GET requests', done => {
|
it('should disallow non-GET requests', done => {
|
||||||
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||||
const bodyRegex = /^Unsupported method/;
|
const bodyRegex = /^Unknown resource/;
|
||||||
|
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405, bodyRegex)),
|
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
]).then(done);
|
]).then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -42,8 +44,8 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
const bodyRegex = /^Missing or empty 'AUTHORIZATION' header/;
|
const bodyRegex = /^Missing or empty 'AUTHORIZATION' header/;
|
||||||
|
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`curl -iL ${headers1} ${url}`).then(h.verifyResponse(401, bodyRegex)),
|
h.runCmd(curl(url, headers1)).then(h.verifyResponse(401, bodyRegex)),
|
||||||
h.runCmd(`curl -iL ${headers2} ${url}`).then(h.verifyResponse(401, bodyRegex)),
|
h.runCmd(curl(url, headers2)).then(h.verifyResponse(401, bodyRegex)),
|
||||||
]).then(done);
|
]).then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -55,14 +57,25 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
const bodyRegex = /^Missing or empty 'X-FILE' header/;
|
const bodyRegex = /^Missing or empty 'X-FILE' header/;
|
||||||
|
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`curl -iL ${headers1} ${url}`).then(h.verifyResponse(400, bodyRegex)),
|
h.runCmd(curl(url, headers1)).then(h.verifyResponse(400, bodyRegex)),
|
||||||
h.runCmd(`curl -iL ${headers2} ${url}`).then(h.verifyResponse(400, bodyRegex)),
|
h.runCmd(curl(url, headers2)).then(h.verifyResponse(400, bodyRegex)),
|
||||||
]).then(done);
|
]).then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject requests for which the PR verification fails', done => {
|
||||||
|
const headers = `--header "Authorization: ${c.BV_verify_error}" ${xFileHeader}`;
|
||||||
|
const url = `http://${host}/create-build/${pr}/${sha9}`;
|
||||||
|
const bodyRegex = new RegExp(`Error while verifying upload for PR ${pr}: Test`);
|
||||||
|
|
||||||
|
h.runCmd(curl(url, headers)).
|
||||||
|
then(h.verifyResponse(403, bodyRegex)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should respond with 404 for unknown paths', done => {
|
it('should respond with 404 for unknown paths', done => {
|
||||||
const cmdPrefix = `${curl} http://${host}`;
|
const cmdPrefix = curl(`http://${host}`);
|
||||||
|
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
|
||||||
@ -78,7 +91,7 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
|
|
||||||
|
|
||||||
it('should reject PRs with leading zeros', done => {
|
it('should reject PRs with leading zeros', done => {
|
||||||
h.runCmd(`${curl} http://${host}/create-build/0${pr}/${sha9}`).
|
h.runCmd(curl(`http://${host}/create-build/0${pr}/${sha9}`)).
|
||||||
then(h.verifyResponse(404)).
|
then(h.verifyResponse(404)).
|
||||||
then(done);
|
then(done);
|
||||||
});
|
});
|
||||||
@ -86,129 +99,253 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
|
|
||||||
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
it('should accept SHAs with leading zeros (but not trim the zeros)', done => {
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/0${sha9}`).then(h.verifyResponse(404)),
|
h.runCmd(curl(`http://${host}/create-build/${pr}/0${sha9}`)).then(h.verifyResponse(404)),
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).then(h.verifyResponse(500)),
|
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha9}`)).then(h.verifyResponse(500)),
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha0}`).then(h.verifyResponse(500)),
|
h.runCmd(curl(`http://${host}/create-build/${pr}/${sha0}`)).then(h.verifyResponse(500)),
|
||||||
]).then(done);
|
]).then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should not overwrite existing builds', done => {
|
[true, false].forEach(isPublic => describe(`(for ${isPublic ? 'public' : 'hidden'} builds)`, () => {
|
||||||
h.createDummyBuild(pr, sha9);
|
const authorizationHeader2 = isPublic ?
|
||||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain('index.html');
|
authorizationHeader : `--header "Authorization: ${c.BV_verify_verifiedNotTrusted}"`;
|
||||||
|
const cmdPrefix = curl('', `${authorizationHeader2} ${xFileHeader}`);
|
||||||
h.writeBuildFile(pr, sha9, 'index.html', 'My content');
|
|
||||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content');
|
|
||||||
|
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
|
||||||
then(h.verifyResponse(403, /^Request to overwrite existing directory/)).
|
|
||||||
then(() => expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content')).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should delete the PR directory on error (for new PR)', done => {
|
it('should not overwrite existing builds', done => {
|
||||||
const prDir = path.join(h.buildsDir, pr);
|
h.createDummyBuild(pr, sha9, isPublic);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||||
|
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
|
||||||
then(h.verifyResponse(500)).
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
|
||||||
then(() => expect(fs.existsSync(prDir)).toBe(false)).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||||
it('should only delete the SHA directory on error (for existing PR)', done => {
|
then(h.verifyResponse(409, /^Request to overwrite existing directory/)).
|
||||||
const prDir = path.join(h.buildsDir, pr);
|
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
|
||||||
const shaDir = path.join(prDir, sha9);
|
then(done);
|
||||||
|
|
||||||
h.createDummyBuild(pr, sha0);
|
|
||||||
|
|
||||||
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
|
|
||||||
then(h.verifyResponse(500)).
|
|
||||||
then(() => {
|
|
||||||
expect(fs.existsSync(shaDir)).toBe(false);
|
|
||||||
expect(fs.existsSync(prDir)).toBe(true);
|
|
||||||
}).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
describe('on successful upload', () => {
|
|
||||||
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
|
||||||
let uploadPromise: Promise<CmdResult>;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
h.createDummyArchive(pr, sha9, archivePath);
|
|
||||||
uploadPromise = h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`);
|
|
||||||
});
|
|
||||||
afterEach(() => h.deletePrDir(pr));
|
|
||||||
|
|
||||||
|
|
||||||
it('should respond with 201', done => {
|
|
||||||
uploadPromise.then(h.verifyResponse(201)).then(done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should extract the contents of the uploaded file', done => {
|
it('should not overwrite existing builds (even if the SHA is different)', done => {
|
||||||
uploadPromise.
|
// Since only the first few characters of the SHA are used, it is possible for two different
|
||||||
|
// SHAs to correspond to the same directory. In that case, we don't want the second SHA to
|
||||||
|
// overwrite the first.
|
||||||
|
|
||||||
|
const sha9Almost = sha9.replace(/.$/, '8');
|
||||||
|
expect(sha9Almost).not.toBe(sha9);
|
||||||
|
|
||||||
|
h.createDummyBuild(pr, sha9, isPublic);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||||
|
|
||||||
|
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content');
|
||||||
|
|
||||||
|
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9Almost}`).
|
||||||
|
then(h.verifyResponse(409, /^Request to overwrite existing directory/)).
|
||||||
|
then(() => expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toBe('My content')).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should delete the PR directory on error (for new PR)', done => {
|
||||||
|
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||||
|
then(h.verifyResponse(500)).
|
||||||
|
then(() => expect(h.buildExists(pr, '', isPublic)).toBe(false)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should only delete the SHA directory on error (for existing PR)', done => {
|
||||||
|
h.createDummyBuild(pr, sha0, isPublic);
|
||||||
|
|
||||||
|
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||||
|
then(h.verifyResponse(500)).
|
||||||
then(() => {
|
then(() => {
|
||||||
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain(`uploaded/${pr}`);
|
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
|
||||||
expect(h.readBuildFile(pr, sha9, 'foo/bar.js')).toContain(`uploaded/${pr}`);
|
expect(h.buildExists(pr, '', isPublic)).toBe(true);
|
||||||
}).
|
}).
|
||||||
then(done);
|
then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it(`should create files/directories owned by '${h.serverUser}'`, done => {
|
describe('on successful upload', () => {
|
||||||
const shaDir = path.join(h.buildsDir, pr, sha9);
|
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||||
const idxPath = path.join(shaDir, 'index.html');
|
const statusCode = isPublic ? 201 : 202;
|
||||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
let uploadPromise: Promise<CmdResult>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
h.createDummyArchive(pr, sha9, archivePath);
|
||||||
|
uploadPromise = h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`);
|
||||||
|
});
|
||||||
|
afterEach(() => h.deletePrDir(pr, isPublic));
|
||||||
|
|
||||||
|
|
||||||
|
it(`should respond with ${statusCode}`, done => {
|
||||||
|
uploadPromise.then(h.verifyResponse(statusCode)).then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should extract the contents of the uploaded file', done => {
|
||||||
|
uploadPromise.
|
||||||
|
then(() => {
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'foo/bar.js', isPublic)).toContain(`uploaded/${pr}`);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it(`should create files/directories owned by '${h.wwwUser}'`, done => {
|
||||||
|
const prDir = h.getPrDir(pr, isPublic);
|
||||||
|
const shaDir = h.getShaDir(prDir, sha9);
|
||||||
|
const idxPath = path.join(shaDir, 'index.html');
|
||||||
|
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||||
|
|
||||||
|
uploadPromise.
|
||||||
|
then(() => Promise.all([
|
||||||
|
h.runCmd(`find ${shaDir}`),
|
||||||
|
h.runCmd(`find ${shaDir} -user ${h.wwwUser}`),
|
||||||
|
])).
|
||||||
|
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
|
||||||
|
expect(userFiles).toBe(allFiles);
|
||||||
|
expect(userFiles).toContain(shaDir);
|
||||||
|
expect(userFiles).toContain(idxPath);
|
||||||
|
expect(userFiles).toContain(barPath);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should delete the uploaded file', done => {
|
||||||
|
expect(fs.existsSync(archivePath)).toBe(true);
|
||||||
|
uploadPromise.
|
||||||
|
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should make the build directory non-writable', done => {
|
||||||
|
const prDir = h.getPrDir(pr, isPublic);
|
||||||
|
const shaDir = h.getShaDir(prDir, sha9);
|
||||||
|
const idxPath = path.join(shaDir, 'index.html');
|
||||||
|
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
||||||
|
|
||||||
|
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
||||||
|
const isNotWritable = (fileOrDir: string) => {
|
||||||
|
const mode = fs.statSync(fileOrDir).mode;
|
||||||
|
// tslint:disable-next-line: no-bitwise
|
||||||
|
return !(mode & parseInt('222', 8));
|
||||||
|
};
|
||||||
|
|
||||||
|
uploadPromise.
|
||||||
|
then(() => {
|
||||||
|
expect(isNotWritable(shaDir)).toBe(true);
|
||||||
|
expect(isNotWritable(idxPath)).toBe(true);
|
||||||
|
expect(isNotWritable(barPath)).toBe(true);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should ignore a legacy 40-chars long build directory (even if it starts with the same chars)', done => {
|
||||||
|
// It is possible that 40-chars long build directories exist, if they had been deployed
|
||||||
|
// before implementing the shorter build directory names. In that case, we don't want the
|
||||||
|
// second (shorter) name to be considered the same as the old one (even if they originate
|
||||||
|
// from the same SHA).
|
||||||
|
|
||||||
|
h.createDummyBuild(pr, sha9, isPublic, false, true);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toContain('index.html');
|
||||||
|
|
||||||
|
h.writeBuildFile(pr, sha9, 'index.html', 'My content', isPublic, true);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
|
||||||
|
|
||||||
|
h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha9}`).
|
||||||
|
then(h.verifyResponse(statusCode)).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
|
||||||
|
expect(h.buildExists(pr, sha9, isPublic, true)).toBe(true);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain('index.html');
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic, true)).toBe('My content');
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
uploadPromise.
|
|
||||||
then(() => Promise.all([
|
|
||||||
h.runCmd(`find ${shaDir}`),
|
|
||||||
h.runCmd(`find ${shaDir} -user ${h.serverUser}`),
|
|
||||||
])).
|
|
||||||
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
|
|
||||||
expect(userFiles).toBe(allFiles);
|
|
||||||
expect(userFiles).toContain(shaDir);
|
|
||||||
expect(userFiles).toContain(idxPath);
|
|
||||||
expect(userFiles).toContain(barPath);
|
|
||||||
}).
|
|
||||||
then(done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should delete the uploaded file', done => {
|
describe('when the PR\'s visibility has changed', () => {
|
||||||
expect(fs.existsSync(archivePath)).toBe(true);
|
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
|
||||||
uploadPromise.
|
const statusCode = isPublic ? 201 : 202;
|
||||||
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
const checkPrVisibility = (isPublic2: boolean) => {
|
||||||
it('should make the build directory non-writable', done => {
|
expect(h.buildExists(pr, '', isPublic2)).toBe(true);
|
||||||
const shaDir = path.join(h.buildsDir, pr, sha9);
|
expect(h.buildExists(pr, '', !isPublic2)).toBe(false);
|
||||||
const idxPath = path.join(shaDir, 'index.html');
|
expect(h.buildExists(pr, sha0, isPublic2)).toBe(true);
|
||||||
const barPath = path.join(shaDir, 'foo', 'bar.js');
|
expect(h.buildExists(pr, sha0, !isPublic2)).toBe(false);
|
||||||
|
|
||||||
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
|
|
||||||
const isNotWritable = (fileOrDir: string) => {
|
|
||||||
const mode = fs.statSync(fileOrDir).mode;
|
|
||||||
// tslint:disable-next-line: no-bitwise
|
|
||||||
return !(mode & parseInt('222', 8));
|
|
||||||
};
|
};
|
||||||
|
const uploadBuild = (sha: string) => h.runCmd(`${cmdPrefix} http://${host}/create-build/${pr}/${sha}`);
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
h.createDummyBuild(pr, sha0, !isPublic);
|
||||||
|
h.createDummyArchive(pr, sha9, archivePath);
|
||||||
|
checkPrVisibility(!isPublic);
|
||||||
|
});
|
||||||
|
afterEach(() => h.deletePrDir(pr, isPublic));
|
||||||
|
|
||||||
|
|
||||||
|
it('should update the PR\'s visibility', done => {
|
||||||
|
uploadBuild(sha9).
|
||||||
|
then(h.verifyResponse(statusCode)).
|
||||||
|
then(() => {
|
||||||
|
checkPrVisibility(isPublic);
|
||||||
|
expect(h.buildExists(pr, sha9, isPublic)).toBe(true);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(`uploaded/${pr}`);
|
||||||
|
expect(h.readBuildFile(pr, sha9, 'index.html', isPublic)).toContain(sha9);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not overwrite existing builds (but keep the updated visibility)', done => {
|
||||||
|
expect(h.buildExists(pr, sha0, isPublic)).toBe(false);
|
||||||
|
|
||||||
|
uploadBuild(sha0).
|
||||||
|
then(h.verifyResponse(409, /^Request to overwrite existing directory/)).
|
||||||
|
then(() => {
|
||||||
|
checkPrVisibility(isPublic);
|
||||||
|
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(pr);
|
||||||
|
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(`uploaded/${pr}`);
|
||||||
|
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).toContain(sha0);
|
||||||
|
expect(h.readBuildFile(pr, sha0, 'index.html', isPublic)).not.toContain(sha9);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject the request if it fails to update the PR\'s visibility', done => {
|
||||||
|
// One way to cause an error is to have both a public and a hidden directory for the same PR.
|
||||||
|
h.createDummyBuild(pr, sha0, isPublic);
|
||||||
|
|
||||||
|
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
|
||||||
|
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
|
||||||
|
|
||||||
|
const errorRegex = new RegExp(`^Request to move '${h.getPrDir(pr, !isPublic)}' ` +
|
||||||
|
`to existing directory '${h.getPrDir(pr, isPublic)}'.`);
|
||||||
|
|
||||||
|
uploadBuild(sha9).
|
||||||
|
then(h.verifyResponse(409, errorRegex)).
|
||||||
|
then(() => {
|
||||||
|
expect(h.buildExists(pr, sha0, isPublic)).toBe(true);
|
||||||
|
expect(h.buildExists(pr, sha0, !isPublic)).toBe(true);
|
||||||
|
expect(h.buildExists(pr, sha9, isPublic)).toBe(false);
|
||||||
|
expect(h.buildExists(pr, sha9, !isPublic)).toBe(false);
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
uploadPromise.
|
|
||||||
then(() => {
|
|
||||||
expect(isNotWritable(shaDir)).toBe(true);
|
|
||||||
expect(isNotWritable(idxPath)).toBe(true);
|
|
||||||
expect(isNotWritable(barPath)).toBe(true);
|
|
||||||
}).
|
|
||||||
then(done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
}));
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -237,27 +374,194 @@ describe('upload-server (on HTTP)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe(`${host}/pr-updated`, () => {
  const url = `http://${host}/pr-updated`;

  // Helpers
  const curl = (payload?: {number: number, action?: string}) => {
    const payloadStr = payload && JSON.stringify(payload) || '';
    return `curl -iLX POST --header "Content-Type: application/json" --data '${payloadStr}' ${url}`;
  };


  it('should disallow non-POST requests', done => {
    const bodyRegex = /^Unknown resource in request/;

    Promise.all([
      h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(404, bodyRegex)),
      h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(404, bodyRegex)),
    ]).then(done);
  });


  it('should respond with 400 for requests without a payload', done => {
    const bodyRegex = /^Missing or empty 'number' field in request/;

    h.runCmd(curl()).
      then(h.verifyResponse(400, bodyRegex)).
      then(done);
  });


  it('should respond with 400 for requests without a \'number\' field', done => {
    const bodyRegex = /^Missing or empty 'number' field in request/;

    Promise.all([
      h.runCmd(curl({} as any)).then(h.verifyResponse(400, bodyRegex)),
      h.runCmd(curl({number: null} as any)).then(h.verifyResponse(400, bodyRegex)),
    ]).then(done);
  });


  it('should reject requests for which checking the PR visibility fails', done => {
    h.runCmd(curl({number: c.BV_getPrIsTrusted_error})).
      then(h.verifyResponse(500, /Test/)).
      then(done);
  });
|
||||||
|
|
||||||
|
|
||||||
|
it('should respond with 404 for unknown paths', done => {
|
||||||
|
const mockPayload = JSON.stringify({number: +pr});
|
||||||
|
const cmdPrefix = `curl -iLX POST --data "${mockPayload}" http://${host}`;
|
||||||
|
|
||||||
|
Promise.all([
|
||||||
|
h.runCmd(`${cmdPrefix}/foo/pr-updated`).then(h.verifyResponse(404)),
|
||||||
|
h.runCmd(`${cmdPrefix}/foo-pr-updated`).then(h.verifyResponse(404)),
|
||||||
|
h.runCmd(`${cmdPrefix}/foonpr-updated`).then(h.verifyResponse(404)),
|
||||||
|
h.runCmd(`${cmdPrefix}/pr-updated/foo`).then(h.verifyResponse(404)),
|
||||||
|
h.runCmd(`${cmdPrefix}/pr-updated-foo`).then(h.verifyResponse(404)),
|
||||||
|
h.runCmd(`${cmdPrefix}/pr-updatednfoo`).then(h.verifyResponse(404)),
|
||||||
|
]).then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should do nothing if PR\'s visibility is already up-to-date', done => {
|
||||||
|
const publicPr = pr;
|
||||||
|
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||||
|
const checkVisibilities = () => {
|
||||||
|
// Public build is already public.
|
||||||
|
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||||
|
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||||
|
// Hidden build is already hidden.
|
||||||
|
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||||
|
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||||
|
};
|
||||||
|
|
||||||
|
h.createDummyBuild(publicPr, sha9, true);
|
||||||
|
h.createDummyBuild(hiddenPr, sha9, false);
|
||||||
|
checkVisibilities();
|
||||||
|
|
||||||
|
Promise.
|
||||||
|
all([
|
||||||
|
h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||||
|
h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||||
|
]).
|
||||||
|
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||||
|
then(checkVisibilities).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should do nothing if \'action\' implies no visibility change', done => {
|
||||||
|
const publicPr = pr;
|
||||||
|
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||||
|
const checkVisibilities = () => {
|
||||||
|
// Public build is hidden atm.
|
||||||
|
expect(h.buildExists(publicPr, '', false)).toBe(true);
|
||||||
|
expect(h.buildExists(publicPr, '', true)).toBe(false);
|
||||||
|
// Hidden build is public atm.
|
||||||
|
expect(h.buildExists(hiddenPr, '', false)).toBe(false);
|
||||||
|
expect(h.buildExists(hiddenPr, '', true)).toBe(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
h.createDummyBuild(publicPr, sha9, false);
|
||||||
|
h.createDummyBuild(hiddenPr, sha9, true);
|
||||||
|
checkVisibilities();
|
||||||
|
|
||||||
|
Promise.
|
||||||
|
all([
|
||||||
|
h.runCmd(curl({number: +publicPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||||
|
h.runCmd(curl({number: +hiddenPr, action: 'foo'})).then(h.verifyResponse(200)),
|
||||||
|
]).
|
||||||
|
// Visibilities should not have changed, because the specified action could not have triggered a change.
|
||||||
|
then(checkVisibilities).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('when the visiblity has changed', () => {
|
||||||
|
const publicPr = pr;
|
||||||
|
const hiddenPr = String(c.BV_getPrIsTrusted_notTrusted);
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
// Create initial PR builds with opposite visibilities as the ones that will be reported:
|
||||||
|
// - The now public PR was previously hidden.
|
||||||
|
// - The now hidden PR was previously public.
|
||||||
|
h.createDummyBuild(publicPr, sha9, false);
|
||||||
|
h.createDummyBuild(hiddenPr, sha9, true);
|
||||||
|
|
||||||
|
expect(h.buildExists(publicPr, '', false)).toBe(true);
|
||||||
|
expect(h.buildExists(publicPr, '', true)).toBe(false);
|
||||||
|
expect(h.buildExists(hiddenPr, '', false)).toBe(false);
|
||||||
|
expect(h.buildExists(hiddenPr, '', true)).toBe(true);
|
||||||
|
});
|
||||||
|
afterEach(() => {
|
||||||
|
// Expect PRs' visibility to have been updated:
|
||||||
|
// - The public PR should be actually public (previously it was hidden).
|
||||||
|
// - The hidden PR should be actually hidden (previously it was public).
|
||||||
|
expect(h.buildExists(publicPr, '', false)).toBe(false);
|
||||||
|
expect(h.buildExists(publicPr, '', true)).toBe(true);
|
||||||
|
expect(h.buildExists(hiddenPr, '', false)).toBe(true);
|
||||||
|
expect(h.buildExists(hiddenPr, '', true)).toBe(false);
|
||||||
|
|
||||||
|
h.deletePrDir(publicPr, true);
|
||||||
|
h.deletePrDir(hiddenPr, false);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should update the PR\'s visibility (action: undefined)', done => {
|
||||||
|
Promise.all([
|
||||||
|
h.runCmd(curl({number: +publicPr})).then(h.verifyResponse(200)),
|
||||||
|
h.runCmd(curl({number: +hiddenPr})).then(h.verifyResponse(200)),
|
||||||
|
]).then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should update the PR\'s visibility (action: labeled)', done => {
|
||||||
|
Promise.all([
|
||||||
|
h.runCmd(curl({number: +publicPr, action: 'labeled'})).then(h.verifyResponse(200)),
|
||||||
|
h.runCmd(curl({number: +hiddenPr, action: 'labeled'})).then(h.verifyResponse(200)),
|
||||||
|
]).then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should update the PR\'s visibility (action: unlabeled)', done => {
|
||||||
|
Promise.all([
|
||||||
|
h.runCmd(curl({number: +publicPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
|
||||||
|
h.runCmd(curl({number: +hiddenPr, action: 'unlabeled'})).then(h.verifyResponse(200)),
|
||||||
|
]).then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
describe(`${host}/*`, () => {
|
describe(`${host}/*`, () => {
|
||||||
|
|
||||||
it('should respond with 404 for GET requests to unknown URLs', done => {
|
it('should respond with 404 for requests to unknown URLs', done => {
|
||||||
const bodyRegex = /^Unknown resource/;
|
const bodyRegex = /^Unknown resource/;
|
||||||
|
|
||||||
Promise.all([
|
Promise.all([
|
||||||
h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
]).then(done);
|
h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
});
|
h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
|
h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
|
h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(404, bodyRegex)),
|
||||||
it('should respond with 405 for non-GET requests to any URL', done => {
|
|
||||||
const bodyRegex = /^Unsupported method/;
|
|
||||||
|
|
||||||
Promise.all([
|
|
||||||
h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
|
||||||
h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
|
||||||
h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
|
||||||
h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(405, bodyRegex)),
|
|
||||||
]).then(done);
|
]).then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -6,26 +6,28 @@
|
|||||||
"author": "Angular",
|
"author": "Angular",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"prebuild": "yarn run clean",
|
"prebuild": "yarn clean-dist",
|
||||||
"build": "tsc",
|
"build": "tsc",
|
||||||
"build-watch": "yarn run tsc -- --watch",
|
"build-watch": "yarn tsc -- --watch",
|
||||||
"clean": "node --eval \"require('shelljs').rm('-rf', 'dist')\"",
|
"clean-dist": "node --eval \"require('shelljs').rm('-rf', 'dist')\"",
|
||||||
"dev": "concurrently --kill-others --raw --success first \"yarn run build-watch\" \"yarn run test-watch\"",
|
"dev": "concurrently --kill-others --raw --success first \"yarn build-watch\" \"yarn test-watch\"",
|
||||||
"lint": "tslint --project tsconfig.json",
|
"lint": "tslint --project tsconfig.json",
|
||||||
"pre~~test-only": "yarn run lint",
|
"pre~~test-only": "yarn lint",
|
||||||
"~~test-only": "node dist/test",
|
"~~test-only": "node dist/test",
|
||||||
"pretest": "yarn run build",
|
"pretest": "yarn build",
|
||||||
"test": "yarn run ~~test-only",
|
"test": "yarn ~~test-only",
|
||||||
"pretest-watch": "yarn run build",
|
"pretest-watch": "yarn build",
|
||||||
"test-watch": "nodemon --exec \"yarn run ~~test-only\" --watch dist"
|
"test-watch": "nodemon --exec \"yarn ~~test-only\" --watch dist"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"body-parser": "^1.17.2",
|
||||||
"express": "^4.14.1",
|
"express": "^4.14.1",
|
||||||
"jasmine": "^2.5.3",
|
"jasmine": "^2.5.3",
|
||||||
"jsonwebtoken": "^7.3.0",
|
"jsonwebtoken": "^7.3.0",
|
||||||
"shelljs": "^0.7.6"
|
"shelljs": "^0.7.6"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@types/body-parser": "^1.16.4",
|
||||||
"@types/express": "^4.0.35",
|
"@types/express": "^4.0.35",
|
||||||
"@types/jasmine": "^2.5.43",
|
"@types/jasmine": "^2.5.43",
|
||||||
"@types/jsonwebtoken": "^7.2.0",
|
"@types/jsonwebtoken": "^7.2.0",
|
||||||
|
@ -1,7 +1,9 @@
|
|||||||
// Imports
|
// Imports
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
import * as shell from 'shelljs';
|
import * as shell from 'shelljs';
|
||||||
import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
|
import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
|
||||||
|
import {HIDDEN_DIR_PREFIX} from '../../lib/common/constants';
|
||||||
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
|
||||||
|
|
||||||
// Tests
|
// Tests
|
||||||
@ -114,7 +116,7 @@ describe('BuildCleaner', () => {
|
|||||||
|
|
||||||
it('should resolve with the value returned by \'removeUnnecessaryBuilds()\'', done => {
|
it('should resolve with the value returned by \'removeUnnecessaryBuilds()\'', done => {
|
||||||
promise.then(result => {
|
promise.then(result => {
|
||||||
expect(result).toBe('Test');
|
expect(result as any).toBe('Test');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -170,6 +172,16 @@ describe('BuildCleaner', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should remove `HIDDEN_DIR_PREFIX` from the filenames', done => {
|
||||||
|
promise.then(result => {
|
||||||
|
expect(result).toEqual([12, 34, 56]);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
|
||||||
|
readdirCb(null, [`${HIDDEN_DIR_PREFIX}12`, '34', `${HIDDEN_DIR_PREFIX}56`]);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should ignore files with non-numeric (or zero) names', done => {
|
it('should ignore files with non-numeric (or zero) names', done => {
|
||||||
promise.then(result => {
|
promise.then(result => {
|
||||||
expect(result).toEqual([12, 34, 56]);
|
expect(result).toEqual([12, 34, 56]);
|
||||||
@ -230,10 +242,22 @@ describe('BuildCleaner', () => {
|
|||||||
describe('removeDir()', () => {
|
describe('removeDir()', () => {
|
||||||
let shellChmodSpy: jasmine.Spy;
|
let shellChmodSpy: jasmine.Spy;
|
||||||
let shellRmSpy: jasmine.Spy;
|
let shellRmSpy: jasmine.Spy;
|
||||||
|
let shellTestSpy: jasmine.Spy;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
shellChmodSpy = spyOn(shell, 'chmod');
|
shellChmodSpy = spyOn(shell, 'chmod');
|
||||||
shellRmSpy = spyOn(shell, 'rm');
|
shellRmSpy = spyOn(shell, 'rm');
|
||||||
|
shellTestSpy = spyOn(shell, 'test').and.returnValue(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should test if the directory exists (and return if is does not)', () => {
|
||||||
|
shellTestSpy.and.returnValue(false);
|
||||||
|
(cleaner as any).removeDir('/foo/bar');
|
||||||
|
|
||||||
|
expect(shellTestSpy).toHaveBeenCalledWith('-d', '/foo/bar');
|
||||||
|
expect(shellChmodSpy).not.toHaveBeenCalled();
|
||||||
|
expect(shellRmSpy).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
@ -287,17 +311,28 @@ describe('BuildCleaner', () => {
|
|||||||
it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
|
it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
|
||||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []);
|
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []);
|
||||||
|
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/2'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3'));
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should try removing hidden directories as well', () => {
|
||||||
|
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []);
|
||||||
|
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should remove the builds that do not correspond to open PRs', () => {
|
it('should remove the builds that do not correspond to open PRs', () => {
|
||||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
|
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(2);
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3'));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
|
||||||
cleanerRemoveDirSpy.calls.reset();
|
cleanerRemoveDirSpy.calls.reset();
|
||||||
|
|
||||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
|
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
|
||||||
@ -305,11 +340,15 @@ describe('BuildCleaner', () => {
|
|||||||
cleanerRemoveDirSpy.calls.reset();
|
cleanerRemoveDirSpy.calls.reset();
|
||||||
|
|
||||||
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
|
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(8);
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/1'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/2'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/3'));
|
||||||
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/4');
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize('/foo/bar/4'));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}1`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}2`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}3`));
|
||||||
|
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith(path.normalize(`/foo/bar/${HIDDEN_DIR_PREFIX}4`));
|
||||||
cleanerRemoveDirSpy.calls.reset();
|
cleanerRemoveDirSpy.calls.reset();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -292,7 +292,7 @@ describe('GithubApi', () => {
|
|||||||
|
|
||||||
|
|
||||||
describe('onResponse', () => {
|
describe('onResponse', () => {
|
||||||
let promise: Promise<void>;
|
let promise: Promise<Object>;
|
||||||
let respond: (statusCode: number) => IncomingMessage;
|
let respond: (statusCode: number) => IncomingMessage;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
@ -66,7 +66,7 @@ describe('GithubPullRequests', () => {
|
|||||||
|
|
||||||
it('should resolve with the returned response', done => {
|
it('should resolve with the returned response', done => {
|
||||||
prs.addComment(42, 'body').then(data => {
|
prs.addComment(42, 'body').then(data => {
|
||||||
expect(data).toEqual('Test');
|
expect(data as any).toBe('Test');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -76,6 +76,30 @@ describe('GithubPullRequests', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('fetch()', () => {
|
||||||
|
let prs: GithubPullRequests;
|
||||||
|
let prsGetSpy: jasmine.Spy;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
prs = new GithubPullRequests('12345', 'foo/bar');
|
||||||
|
prsGetSpy = spyOn(prs as any, 'get');
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should call \'get()\' with the correct pathname', () => {
|
||||||
|
prs.fetch(42);
|
||||||
|
expect(prsGetSpy).toHaveBeenCalledWith('/repos/foo/bar/issues/42');
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should forward the value returned by \'get()\'', () => {
|
||||||
|
prsGetSpy.and.returnValue('Test');
|
||||||
|
expect(prs.fetch(42) as any).toBe('Test');
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
describe('fetchAll()', () => {
|
describe('fetchAll()', () => {
|
||||||
let prs: GithubPullRequests;
|
let prs: GithubPullRequests;
|
||||||
let prsGetPaginatedSpy: jasmine.Spy;
|
let prsGetPaginatedSpy: jasmine.Spy;
|
||||||
@ -109,7 +133,7 @@ describe('GithubPullRequests', () => {
|
|||||||
|
|
||||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||||
prsGetPaginatedSpy.and.returnValue('Test');
|
prsGetPaginatedSpy.and.returnValue('Test');
|
||||||
expect(prs.fetchAll()).toBe('Test');
|
expect(prs.fetchAll() as any).toBe('Test');
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
|
@ -38,7 +38,7 @@ describe('GithubTeams', () => {
|
|||||||
|
|
||||||
it('should forward the value returned by \'getPaginated()\'', () => {
|
it('should forward the value returned by \'getPaginated()\'', () => {
|
||||||
teamsGetPaginatedSpy.and.returnValue('Test');
|
teamsGetPaginatedSpy.and.returnValue('Test');
|
||||||
expect(teams.fetchAll()).toBe('Test');
|
expect(teams.fetchAll() as any).toBe('Test');
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
@ -50,12 +50,16 @@ describe('GithubTeams', () => {
|
|||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
teams = new GithubTeams('12345', 'foo');
|
teams = new GithubTeams('12345', 'foo');
|
||||||
teamsGetSpy = spyOn(teams, 'get');
|
teamsGetSpy = spyOn(teams, 'get').and.returnValue(Promise.resolve(null));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should return a promise', () => {
|
it('should return a promise', done => {
|
||||||
expect(teams.isMemberById('user', [1])).toEqual(jasmine.any(Promise));
|
const promise = teams.isMemberById('user', [1]);
|
||||||
|
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||||
|
// to avoid running the actual `get()`.
|
||||||
|
|
||||||
|
expect(promise).toEqual(jasmine.any(Promise));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
@ -69,7 +73,6 @@ describe('GithubTeams', () => {
|
|||||||
|
|
||||||
|
|
||||||
it('should call \'get()\' with the correct pathname', done => {
|
it('should call \'get()\' with the correct pathname', done => {
|
||||||
teamsGetSpy.and.returnValue(Promise.resolve(null));
|
|
||||||
teams.isMemberById('user', [1]).then(() => {
|
teams.isMemberById('user', [1]).then(() => {
|
||||||
expect(teamsGetSpy).toHaveBeenCalledWith('/teams/1/memberships/user');
|
expect(teamsGetSpy).toHaveBeenCalledWith('/teams/1/memberships/user');
|
||||||
done();
|
done();
|
||||||
|
@@ -2,9 +2,11 @@
 import * as cp from 'child_process';
 import {EventEmitter} from 'events';
 import * as fs from 'fs';
+import * as path from 'path';
 import * as shell from 'shelljs';
+import {SHORT_SHA_LEN} from '../../lib/common/constants';
 import {BuildCreator} from '../../lib/upload-server/build-creator';
-import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
 import {UploadError} from '../../lib/upload-server/upload-error';
 import {expectToBeUploadError} from './helpers';

||||||
@@ -12,10 +14,13 @@ import {expectToBeUploadError} from './helpers';
 describe('BuildCreator', () => {
   const pr = '9';
   const sha = '9'.repeat(40);
+  const shortSha = sha.substr(0, SHORT_SHA_LEN);
   const archive = 'snapshot.tar.gz';
   const buildsDir = 'builds/dir';
-  const prDir = `${buildsDir}/${pr}`;
-  const shaDir = `${prDir}/${sha}`;
+  const hiddenPrDir = path.join(buildsDir, `hidden--${pr}`);
+  const publicPrDir = path.join(buildsDir, pr);
+  const hiddenShaDir = path.join(hiddenPrDir, shortSha);
+  const publicShaDir = path.join(publicPrDir, shortSha);
   let bc: BuildCreator;

   beforeEach(() => bc = new BuildCreator(buildsDir));
||||||
@@ -42,6 +47,7 @@ describe('BuildCreator', () => {
     let bcEmitSpy: jasmine.Spy;
     let bcExistsSpy: jasmine.Spy;
     let bcExtractArchiveSpy: jasmine.Spy;
+    let bcUpdatePrVisibilitySpy: jasmine.Spy;
     let shellMkdirSpy: jasmine.Spy;
     let shellRmSpy: jasmine.Spy;

||||||
@@ -49,13 +55,214 @@ describe('BuildCreator', () => {
       bcEmitSpy = spyOn(bc, 'emit');
       bcExistsSpy = spyOn(bc as any, 'exists');
       bcExtractArchiveSpy = spyOn(bc as any, 'extractArchive');
+      bcUpdatePrVisibilitySpy = spyOn(bc, 'updatePrVisibility');
       shellMkdirSpy = spyOn(shell, 'mkdir');
       shellRmSpy = spyOn(shell, 'rm');
     });


||||||
|
|
||||||
|
[true, false].forEach(isPublic => {
|
||||||
|
const prDir = isPublic ? publicPrDir : hiddenPrDir;
|
||||||
|
const shaDir = isPublic ? publicShaDir : hiddenShaDir;
|
||||||
|
|
||||||
|
|
||||||
|
it('should return a promise', done => {
|
||||||
|
const promise = bc.create(pr, sha, archive, isPublic);
|
||||||
|
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||||
|
// to avoid running the actual `extractArchive()`.
|
||||||
|
|
||||||
|
expect(promise).toEqual(jasmine.any(Promise));
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should update the PR\'s visibility first if necessary', done => {
|
||||||
|
bcUpdatePrVisibilitySpy.and.callFake(() => expect(shellMkdirSpy).not.toHaveBeenCalled());
|
||||||
|
|
||||||
|
bc.create(pr, sha, archive, isPublic).
|
||||||
|
then(() => {
|
||||||
|
expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith(pr, isPublic);
|
||||||
|
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should create the build directory (and any parent directories)', done => {
|
||||||
|
bc.create(pr, sha, archive, isPublic).
|
||||||
|
then(() => expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should extract the archive contents into the build directory', done => {
|
||||||
|
bc.create(pr, sha, archive, isPublic).
|
||||||
|
then(() => expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should emit a CreatedBuildEvent on success', done => {
|
||||||
|
let emitted = false;
|
||||||
|
|
||||||
|
bcEmitSpy.and.callFake((type: string, evt: CreatedBuildEvent) => {
|
||||||
|
expect(type).toBe(CreatedBuildEvent.type);
|
||||||
|
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
|
||||||
|
expect(evt.pr).toBe(+pr);
|
||||||
|
expect(evt.sha).toBe(shortSha);
|
||||||
|
expect(evt.isPublic).toBe(isPublic);
|
||||||
|
|
||||||
|
emitted = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
bc.create(pr, sha, archive, isPublic).
|
||||||
|
then(() => expect(emitted).toBe(true)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('on error', () => {
|
||||||
|
let existsValues: {[dir: string]: boolean};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
existsValues = {
|
||||||
|
[prDir]: false,
|
||||||
|
[shaDir]: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
bcExistsSpy.and.callFake((dir: string) => existsValues[dir]);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should abort and skip further operations if changing the PR\'s visibility fails', done => {
|
||||||
|
const mockError = new UploadError(543, 'Test');
|
||||||
|
bcUpdatePrVisibilitySpy.and.returnValue(Promise.reject(mockError));
|
||||||
|
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||||
|
expect(err).toBe(mockError);
|
||||||
|
|
||||||
|
expect(bcExistsSpy).not.toHaveBeenCalled();
|
||||||
|
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should abort and skip further operations if the build does already exist', done => {
|
||||||
|
existsValues[shaDir] = true;
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 409, `Request to overwrite existing directory: ${shaDir}`);
|
||||||
|
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should detect existing build directory after visibility change', done => {
|
||||||
|
bcUpdatePrVisibilitySpy.and.callFake(() => existsValues[prDir] = existsValues[shaDir] = true);
|
||||||
|
|
||||||
|
expect(bcExistsSpy(prDir)).toBe(false);
|
||||||
|
expect(bcExistsSpy(shaDir)).toBe(false);
|
||||||
|
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 409, `Request to overwrite existing directory: ${shaDir}`);
|
||||||
|
expect(shellMkdirSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should abort and skip further operations if it fails to create the directories', done => {
|
||||||
|
shellMkdirSpy.and.throwError('');
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||||
|
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||||
|
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should abort and skip further operations if it fails to extract the archive', done => {
|
||||||
|
bcExtractArchiveSpy.and.throwError('');
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||||
|
expect(shellMkdirSpy).toHaveBeenCalled();
|
||||||
|
expect(bcExtractArchiveSpy).toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should delete the PR directory (for new PR)', done => {
|
||||||
|
bcExtractArchiveSpy.and.throwError('');
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||||
|
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should delete the SHA directory (for existing PR)', done => {
|
||||||
|
existsValues[prDir] = true;
|
||||||
|
bcExtractArchiveSpy.and.throwError('');
|
||||||
|
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(() => {
|
||||||
|
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject with an UploadError', done => {
|
||||||
|
shellMkdirSpy.and.callFake(() => { throw 'Test'; });
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 500, `Error while uploading to directory: ${shaDir}\nTest`);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should pass UploadError instances unmodified', done => {
|
||||||
|
shellMkdirSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
|
||||||
|
bc.create(pr, sha, archive, isPublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 543, 'Test');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('updatePrVisibility()', () => {
|
||||||
|
let bcEmitSpy: jasmine.Spy;
|
||||||
|
let bcExistsSpy: jasmine.Spy;
|
||||||
|
let bcListShasByDate: jasmine.Spy;
|
||||||
|
let shellMvSpy: jasmine.Spy;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
bcEmitSpy = spyOn(bc, 'emit');
|
||||||
|
bcExistsSpy = spyOn(bc as any, 'exists');
|
||||||
|
bcListShasByDate = spyOn(bc as any, 'listShasByDate');
|
||||||
|
shellMvSpy = spyOn(shell, 'mv');
|
||||||
|
|
||||||
|
bcExistsSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
||||||
|
bcListShasByDate.and.returnValue([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should return a promise', done => {
|
it('should return a promise', done => {
|
||||||
const promise = bc.create(pr, sha, archive);
|
const promise = bc.updatePrVisibility(pr, true);
|
||||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||||
// to avoid running the actual `extractArchive()`.
|
// to avoid running the actual `extractArchive()`.
|
||||||
|
|
||||||
@ -63,106 +270,157 @@ describe('BuildCreator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should throw if the build does already exist', done => {
|
[true, false].forEach(makePublic => {
|
||||||
bcExistsSpy.and.returnValue(true);
|
const oldPrDir = makePublic ? hiddenPrDir : publicPrDir;
|
||||||
bc.create(pr, sha, archive).catch(err => {
|
const newPrDir = makePublic ? publicPrDir : hiddenPrDir;
|
||||||
expectToBeUploadError(err, 403, `Request to overwrite existing directory: ${shaDir}`);
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should create the build directory (and any parent directories)', done => {
|
it('should rename the directory', done => {
|
||||||
bc.create(pr, sha, archive).
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
then(() => expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir)).
|
then(() => expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir)).
|
||||||
then(done);
|
then(done);
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should extract the archive contents into the build directory', done => {
|
|
||||||
bc.create(pr, sha, archive).
|
|
||||||
then(() => expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir)).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should emit a CreatedBuildEvent on success', done => {
|
|
||||||
let emitted = false;
|
|
||||||
|
|
||||||
bcEmitSpy.and.callFake((type: string, evt: CreatedBuildEvent) => {
|
|
||||||
expect(type).toBe(CreatedBuildEvent.type);
|
|
||||||
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
|
|
||||||
expect(evt.pr).toBe(+pr);
|
|
||||||
expect(evt.sha).toBe(sha);
|
|
||||||
|
|
||||||
emitted = true;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
bc.create(pr, sha, archive).
|
|
||||||
then(() => expect(emitted).toBe(true)).
|
|
||||||
then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
describe('when the visibility is updated', () => {
|
||||||
|
|
||||||
describe('on error', () => {
|
it('should resolve to true', done => {
|
||||||
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
it('should abort and skip further operations if it fails to create the directories', done => {
|
then(result => expect(result).toBe(true)).
|
||||||
shellMkdirSpy.and.throwError('');
|
then(done);
|
||||||
bc.create(pr, sha, archive).catch(() => {
|
|
||||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
|
||||||
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
|
|
||||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
|
||||||
done();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should rename the directory', done => {
|
||||||
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
|
then(() => expect(shellMvSpy).toHaveBeenCalledWith(oldPrDir, newPrDir)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should emit a ChangedPrVisibilityEvent on success', done => {
|
||||||
|
let emitted = false;
|
||||||
|
|
||||||
|
bcEmitSpy.and.callFake((type: string, evt: ChangedPrVisibilityEvent) => {
|
||||||
|
expect(type).toBe(ChangedPrVisibilityEvent.type);
|
||||||
|
expect(evt).toEqual(jasmine.any(ChangedPrVisibilityEvent));
|
||||||
|
expect(evt.pr).toBe(+pr);
|
||||||
|
expect(evt.shas).toEqual(jasmine.any(Array));
|
||||||
|
expect(evt.isPublic).toBe(makePublic);
|
||||||
|
|
||||||
|
emitted = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
|
then(() => expect(emitted).toBe(true)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should include all shas in the emitted event', done => {
|
||||||
|
const shas = ['foo', 'bar', 'baz'];
|
||||||
|
let emitted = false;
|
||||||
|
|
||||||
|
bcListShasByDate.and.returnValue(Promise.resolve(shas));
|
||||||
|
bcEmitSpy.and.callFake((type: string, evt: ChangedPrVisibilityEvent) => {
|
||||||
|
expect(bcListShasByDate).toHaveBeenCalledWith(newPrDir);
|
||||||
|
|
||||||
|
expect(type).toBe(ChangedPrVisibilityEvent.type);
|
||||||
|
expect(evt).toEqual(jasmine.any(ChangedPrVisibilityEvent));
|
||||||
|
expect(evt.pr).toBe(+pr);
|
||||||
|
expect(evt.shas).toBe(shas);
|
||||||
|
expect(evt.isPublic).toBe(makePublic);
|
||||||
|
|
||||||
|
emitted = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
|
then(() => expect(emitted).toBe(true)).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should abort and skip further operations if it fails to extract the archive', done => {
|
it('should do nothing if the visibility is already up-to-date', done => {
|
||||||
bcExtractArchiveSpy.and.throwError('');
|
bcExistsSpy.and.callFake((dir: string) => dir === newPrDir);
|
||||||
bc.create(pr, sha, archive).catch(() => {
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
expect(shellMkdirSpy).toHaveBeenCalled();
|
then(result => {
|
||||||
expect(bcExtractArchiveSpy).toHaveBeenCalled();
|
expect(result).toBe(false);
|
||||||
expect(bcEmitSpy).not.toHaveBeenCalled();
|
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||||
done();
|
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||||
});
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should delete the PR directory (for new PR)', done => {
|
it('should do nothing if the PR directory does not exist', done => {
|
||||||
bcExtractArchiveSpy.and.throwError('');
|
bcExistsSpy.and.returnValue(false);
|
||||||
bc.create(pr, sha, archive).catch(() => {
|
bc.updatePrVisibility(pr, makePublic).
|
||||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
|
then(result => {
|
||||||
done();
|
expect(result).toBe(false);
|
||||||
});
|
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should delete the SHA directory (for existing PR)', done => {
|
describe('on error', () => {
|
||||||
bcExistsSpy.and.callFake((path: string) => path !== shaDir);
|
|
||||||
bcExtractArchiveSpy.and.throwError('');
|
|
||||||
|
|
||||||
bc.create(pr, sha, archive).catch(() => {
|
it('should abort and skip further operations if both directories exist', done => {
|
||||||
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
|
bcExistsSpy.and.returnValue(true);
|
||||||
done();
|
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 409, `Request to move '${oldPrDir}' to existing directory '${newPrDir}'.`);
|
||||||
|
expect(shellMvSpy).not.toHaveBeenCalled();
|
||||||
|
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should reject with an UploadError', done => {
|
it('should abort and skip further operations if it fails to rename the directory', done => {
|
||||||
shellMkdirSpy.and.callFake(() => {throw 'Test'; });
|
shellMvSpy.and.throwError('');
|
||||||
bc.create(pr, sha, archive).catch(err => {
|
bc.updatePrVisibility(pr, makePublic).catch(() => {
|
||||||
expectToBeUploadError(err, 500, `Error while uploading to directory: ${shaDir}\nTest`);
|
expect(shellMvSpy).toHaveBeenCalled();
|
||||||
done();
|
expect(bcListShasByDate).not.toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should pass UploadError instances unmodified', done => {
|
it('should abort and skip further operations if it fails to list the SHAs', done => {
|
||||||
shellMkdirSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
|
bcListShasByDate.and.throwError('');
|
||||||
bc.create(pr, sha, archive).catch(err => {
|
bc.updatePrVisibility(pr, makePublic).catch(() => {
|
||||||
expectToBeUploadError(err, 543, 'Test');
|
expect(shellMvSpy).toHaveBeenCalled();
|
||||||
done();
|
expect(bcListShasByDate).toHaveBeenCalled();
|
||||||
|
expect(bcEmitSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject with an UploadError', done => {
|
||||||
|
shellMvSpy.and.callFake(() => { throw 'Test'; });
|
||||||
|
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 500, `Error while making PR ${pr} ${makePublic ? 'public' : 'hidden'}.\nTest`);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should pass UploadError instances unmodified', done => {
|
||||||
|
shellMvSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
|
||||||
|
bc.updatePrVisibility(pr, makePublic).catch(err => {
|
||||||
|
expectToBeUploadError(err, 543, 'Test');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
@ -317,4 +575,101 @@ describe('BuildCreator', () => {
|
|||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('listShasByDate()', () => {
|
||||||
|
let shellLsSpy: jasmine.Spy;
|
||||||
|
const lsResult = (name: string, mtimeMs: number, isDirectory = true) => ({
|
||||||
|
isDirectory: () => isDirectory,
|
||||||
|
mtime: new Date(mtimeMs),
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
shellLsSpy = spyOn(shell, 'ls').and.returnValue([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should return a promise', done => {
|
||||||
|
const promise = (bc as any).listShasByDate('input/dir');
|
||||||
|
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||||
|
// to avoid running the actual `ls()`.
|
||||||
|
|
||||||
|
expect(promise).toEqual(jasmine.any(Promise));
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should `ls()` files with their metadata', done => {
|
||||||
|
(bc as any).listShasByDate('input/dir').
|
||||||
|
then(() => expect(shellLsSpy).toHaveBeenCalledWith('-l', 'input/dir')).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should reject if listing files fails', done => {
|
||||||
|
shellLsSpy.and.returnValue(Promise.reject('Test'));
|
||||||
|
(bc as any).listShasByDate('input/dir').catch((err: string) => {
|
||||||
|
expect(err).toBe('Test');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should return the filenames', done => {
|
||||||
|
shellLsSpy.and.returnValue(Promise.resolve([
|
||||||
|
lsResult('foo', 100),
|
||||||
|
lsResult('bar', 200),
|
||||||
|
lsResult('baz', 300),
|
||||||
|
]));
|
||||||
|
|
||||||
|
(bc as any).listShasByDate('input/dir').
|
||||||
|
then((shas: string[]) => expect(shas).toEqual(['foo', 'bar', 'baz'])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should sort by date', done => {
|
||||||
|
shellLsSpy.and.returnValue(Promise.resolve([
|
||||||
|
lsResult('foo', 300),
|
||||||
|
lsResult('bar', 100),
|
||||||
|
lsResult('baz', 200),
|
||||||
|
]));
|
||||||
|
|
||||||
|
(bc as any).listShasByDate('input/dir').
|
||||||
|
then((shas: string[]) => expect(shas).toEqual(['bar', 'baz', 'foo'])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not break with ShellJS\' custom `sort()` method', done => {
|
||||||
|
const mockArray = [
|
||||||
|
lsResult('foo', 300),
|
||||||
|
lsResult('bar', 100),
|
||||||
|
lsResult('baz', 200),
|
||||||
|
];
|
||||||
|
mockArray.sort = jasmine.createSpy('sort');
|
||||||
|
|
||||||
|
shellLsSpy.and.returnValue(Promise.resolve(mockArray));
|
||||||
|
(bc as any).listShasByDate('input/dir').
|
||||||
|
then((shas: string[]) => {
|
||||||
|
expect(shas).toEqual(['bar', 'baz', 'foo']);
|
||||||
|
expect(mockArray.sort).not.toHaveBeenCalled();
|
||||||
|
}).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should only include directories', done => {
|
||||||
|
shellLsSpy.and.returnValue(Promise.resolve([
|
||||||
|
lsResult('foo', 100),
|
||||||
|
lsResult('bar', 200, false),
|
||||||
|
lsResult('baz', 300),
|
||||||
|
]));
|
||||||
|
|
||||||
|
(bc as any).listShasByDate('input/dir').
|
||||||
|
then((shas: string[]) => expect(shas).toEqual(['foo', 'baz'])).
|
||||||
|
then(done);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
|
@@ -1,15 +1,15 @@
 // Imports
-import {BuildEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';

 // Tests
-describe('BuildEvent', () => {
-  let evt: BuildEvent;
+describe('ChangedPrVisibilityEvent', () => {
+  let evt: ChangedPrVisibilityEvent;

-  beforeEach(() => evt = new BuildEvent('foo', 42, 'bar'));
+  beforeEach(() => evt = new ChangedPrVisibilityEvent(42, ['foo', 'bar'], true));


-  it('should have a \'type\' property', () => {
-    expect(evt.type).toBe('foo');
+  it('should have a static \'type\' property', () => {
+    expect(ChangedPrVisibilityEvent.type).toBe('pr.changedVisibility');
   });


||||||
@@ -18,8 +18,13 @@ describe('BuildEvent', () => {
   });


-  it('should have a \'sha\' property', () => {
-    expect(evt.sha).toBe('bar');
+  it('should have a \'shas\' property', () => {
+    expect(evt.shas).toEqual(['foo', 'bar']);
+  });
+
+
+  it('should have an \'isPublic\' property', () => {
+    expect(evt.isPublic).toBe(true);
   });

 });
||||||
@@ -28,7 +33,7 @@ describe('BuildEvent', () => {
 describe('CreatedBuildEvent', () => {
   let evt: CreatedBuildEvent;

-  beforeEach(() => evt = new CreatedBuildEvent(42, 'bar'));
+  beforeEach(() => evt = new CreatedBuildEvent(42, 'bar', true));


   it('should have a static \'type\' property', () => {
||||||
@@ -36,19 +41,6 @@ describe('CreatedBuildEvent', () => {
   });


-  it('should extend BuildEvent', () => {
-    expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
-    expect(evt).toEqual(jasmine.any(BuildEvent));
-
-    expect(Object.getPrototypeOf(evt)).toBe(CreatedBuildEvent.prototype);
-  });
-
-
-  it('should automatically set the \'type\'', () => {
-    expect(evt.type).toBe(CreatedBuildEvent.type);
-  });
-
-
   it('should have a \'pr\' property', () => {
     expect(evt.pr).toBe(42);
   });
||||||
@@ -58,4 +50,9 @@ describe('CreatedBuildEvent', () => {
     expect(evt.sha).toBe('bar');
   });

+
+  it('should have an \'isPublic\' property', () => {
+    expect(evt.isPublic).toBe(true);
+  });
+
 });
|
@@ -1,8 +1,8 @@
 // Imports
 import * as jwt from 'jsonwebtoken';
-import {GithubPullRequests} from '../../lib/common/github-pull-requests';
+import {GithubPullRequests, PullRequest} from '../../lib/common/github-pull-requests';
 import {GithubTeams} from '../../lib/common/github-teams';
-import {BuildVerifier} from '../../lib/upload-server/build-verifier';
+import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../../lib/upload-server/build-verifier';
 import {expectToBeUploadError} from './helpers';

 // Tests
||||||
@@ -13,14 +13,15 @@ describe('BuildVerifier', () => {
     organization: 'organization',
     repoSlug: 'repo/slug',
     secret: 'secret',
+    trustedPrLabel: 'trusted: pr-label',
   };
   let bv: BuildVerifier;

   // Helpers
   const createBuildVerifier = (partialConfig: Partial<typeof defaultConfig> = {}) => {
-    const cfg = {...defaultConfig, ...partialConfig};
+    const cfg = {...defaultConfig, ...partialConfig} as typeof defaultConfig;
     return new BuildVerifier(cfg.secret, cfg.githubToken, cfg.repoSlug, cfg.organization,
-      cfg.allowedTeamSlugs);
+      cfg.allowedTeamSlugs, cfg.trustedPrLabel);
   };

   beforeEach(() => bv = createBuildVerifier());
||||||
@@ -28,12 +29,13 @@ describe('BuildVerifier', () => {

   describe('constructor()', () => {

-    ['secret', 'githubToken', 'repoSlug', 'organization', 'allowedTeamSlugs'].forEach(param => {
-      it(`should throw if '${param}' is missing or empty`, () => {
-        expect(() => createBuildVerifier({[param]: ''})).
-          toThrowError(`Missing or empty required parameter '${param}'!`);
+    ['secret', 'githubToken', 'repoSlug', 'organization', 'allowedTeamSlugs', 'trustedPrLabel'].
+      forEach(param => {
+        it(`should throw if '${param}' is missing or empty`, () => {
+          expect(() => createBuildVerifier({[param]: ''})).
+            toThrowError(`Missing or empty required parameter '${param}'!`);
+        });
       });
-    });


     it('should throw if \'allowedTeamSlugs\' is an empty array', () => {
||||||
@ -44,6 +46,122 @@ describe('BuildVerifier', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('getPrIsTrusted()', () => {
|
||||||
|
const pr = 9;
|
||||||
|
let mockPrInfo: PullRequest;
|
||||||
|
let prsFetchSpy: jasmine.Spy;
|
||||||
|
let teamsIsMemberBySlugSpy: jasmine.Spy;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockPrInfo = {
|
||||||
|
labels: [
|
||||||
|
{name: 'foo'},
|
||||||
|
{name: 'bar'},
|
||||||
|
],
|
||||||
|
number: 9,
|
||||||
|
user: {login: 'username'},
|
||||||
|
};
|
||||||
|
|
||||||
|
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
|
||||||
|
and.returnValue(Promise.resolve(mockPrInfo));
|
||||||
|
|
||||||
|
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
|
||||||
|
and.returnValue(Promise.resolve(true));
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should return a promise', done => {
|
||||||
|
const promise = bv.getPrIsTrusted(pr);
|
||||||
|
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
||||||
|
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
|
||||||
|
|
||||||
|
expect(promise).toEqual(jasmine.any(Promise));
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should fetch the corresponding PR', done => {
|
||||||
|
bv.getPrIsTrusted(pr).then(() => {
|
||||||
|
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should fail if fetching the PR errors', done => {
|
||||||
|
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
|
||||||
|
bv.getPrIsTrusted(pr).catch(err => {
|
||||||
|
expect(err).toBe('Test');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('when the PR has the "trusted PR" label', () => {
|
||||||
|
|
||||||
|
beforeEach(() => mockPrInfo.labels.push({name: 'trusted: pr-label'}));
|
||||||
|
|
||||||
|
|
||||||
|
it('should resolve to true', done => {
|
||||||
|
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||||
|
expect(isTrusted).toBe(true);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not try to verify the author\'s membership status', done => {
|
||||||
|
bv.getPrIsTrusted(pr).then(() => {
|
||||||
|
expect(teamsIsMemberBySlugSpy).not.toHaveBeenCalled();
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('when the PR does not have the "trusted PR" label', () => {
|
||||||
|
|
||||||
|
it('should verify the PR author\'s membership in the specified teams', done => {
|
||||||
|
bv.getPrIsTrusted(pr).then(() => {
|
||||||
|
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should fail if verifying membership errors', done => {
|
||||||
|
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
|
||||||
|
bv.getPrIsTrusted(pr).catch(err => {
|
||||||
|
expect(err).toBe('Test');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should resolve to true if the PR\'s author is a member', done => {
|
||||||
|
teamsIsMemberBySlugSpy.and.returnValue(Promise.resolve(true));
|
||||||
|
|
||||||
|
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||||
|
expect(isTrusted).toBe(true);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should resolve to false if the PR\'s author is not a member', done => {
|
||||||
|
teamsIsMemberBySlugSpy.and.returnValue(Promise.resolve(false));
|
||||||
|
|
||||||
|
bv.getPrIsTrusted(pr).then(isTrusted => {
|
||||||
|
expect(isTrusted).toBe(false);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
describe('verify()', () => {
|
describe('verify()', () => {
|
||||||
const pr = 9;
|
const pr = 9;
|
||||||
const defaultJwt = {
|
const defaultJwt = {
|
||||||
@@ -53,22 +171,21 @@ describe('BuildVerifier', () => {
       'pull-request': pr,
       'slug': defaultConfig.repoSlug,
     };
-    let bvGetPrAuthorTeamMembership: jasmine.Spy;
+    let bvGetPrIsTrusted: jasmine.Spy;

     // Heleprs
     const createAuthHeader = (partialJwt: Partial<typeof defaultJwt> = {}, secret: string = defaultConfig.secret) =>
      `Token ${jwt.sign({...defaultJwt, ...partialJwt}, secret)}`;

     beforeEach(() => {
-      bvGetPrAuthorTeamMembership = spyOn(bv, 'getPrAuthorTeamMembership').
-        and.returnValue(Promise.resolve({author: 'some-author', isMember: true}));
+      bvGetPrIsTrusted = spyOn(bv, 'getPrIsTrusted').and.returnValue(Promise.resolve(true));
     });


     it('should return a promise', done => {
       const promise = bv.verify(pr, createAuthHeader());
       promise.then(done);   // Do not complete the test (and release the spies) synchronously
-                            // to avoid running the actual `bvGetPrAuthorTeamMembership()`.
+                            // to avoid running the actual `bvGetPrIsTrusted()`.

       expect(promise).toEqual(jasmine.any(Promise));
     });
||||||
@@ -148,16 +265,16 @@ describe('BuildVerifier', () => {
     });


-    it('should call \'getPrAuthorTeamMembership()\' if the token is valid', done => {
+    it('should call \'getPrIsTrusted()\' if the token is valid', done => {
       bv.verify(pr, createAuthHeader()).then(() => {
-        expect(bvGetPrAuthorTeamMembership).toHaveBeenCalledWith(pr);
+        expect(bvGetPrIsTrusted).toHaveBeenCalledWith(pr);
         done();
       });
     });


-    it('should fail if \'getPrAuthorTeamMembership()\' rejects', done => {
-      bvGetPrAuthorTeamMembership.and.callFake(() => Promise.reject('Test'));
+    it('should fail if \'getPrIsTrusted()\' rejects', done => {
+      bvGetPrIsTrusted.and.callFake(() => Promise.reject('Test'));
       bv.verify(pr, createAuthHeader()).catch(err => {
         expectToBeUploadError(err, 403, `Error while verifying upload for PR ${pr}: Test`);
         done();
||||||
@ -165,97 +282,22 @@ describe('BuildVerifier', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should fail if \'getPrAuthorTeamMembership()\' reports no membership', done => {
|
it('should resolve to `verifiedNotTrusted` if \'getPrIsTrusted()\' returns false', done => {
|
||||||
const errorMessage = `Error while verifying upload for PR ${pr}: User 'test' is not an active member of any of ` +
|
bvGetPrIsTrusted.and.returnValue(Promise.resolve(false));
|
||||||
'the following teams: team1, team2';
|
bv.verify(pr, createAuthHeader()).then(value => {
|
||||||
|
expect(value).toBe(BUILD_VERIFICATION_STATUS.verifiedNotTrusted);
|
||||||
bvGetPrAuthorTeamMembership.and.returnValue(Promise.resolve({author: 'test', isMember: false}));
|
|
||||||
bv.verify(pr, createAuthHeader()).catch(err => {
|
|
||||||
expectToBeUploadError(err, 403, errorMessage);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should succeed if everything checks outs', done => {
|
it('should resolve to `verifiedAndTrusted` if \'getPrIsTrusted()\' returns true', done => {
|
||||||
bv.verify(pr, createAuthHeader()).then(done);
|
bv.verify(pr, createAuthHeader()).then(value => {
|
||||||
});
|
expect(value).toBe(BUILD_VERIFICATION_STATUS.verifiedAndTrusted);
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
describe('getPrAuthorTeamMembership()', () => {
|
|
||||||
const pr = 9;
|
|
||||||
let prsFetchSpy: jasmine.Spy;
|
|
||||||
let teamsIsMemberBySlugSpy: jasmine.Spy;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
prsFetchSpy = spyOn(GithubPullRequests.prototype, 'fetch').
|
|
||||||
and.returnValue(Promise.resolve({user: {login: 'username'}}));
|
|
||||||
|
|
||||||
teamsIsMemberBySlugSpy = spyOn(GithubTeams.prototype, 'isMemberBySlug').
|
|
||||||
and.returnValue(Promise.resolve(true));
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should return a promise', done => {
|
|
||||||
const promise = bv.getPrAuthorTeamMembership(pr);
|
|
||||||
promise.then(done); // Do not complete the test (and release the spies) synchronously
|
|
||||||
// to avoid running the actual `GithubTeams#isMemberBySlug()`.
|
|
||||||
|
|
||||||
expect(promise).toEqual(jasmine.any(Promise));
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should fetch the corresponding PR', done => {
|
|
||||||
bv.getPrAuthorTeamMembership(pr).then(() => {
|
|
||||||
expect(prsFetchSpy).toHaveBeenCalledWith(pr);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should fail if fetching the PR errors', done => {
|
|
||||||
prsFetchSpy.and.callFake(() => Promise.reject('Test'));
|
|
||||||
bv.getPrAuthorTeamMembership(pr).catch(err => {
|
|
||||||
expect(err).toBe('Test');
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should verify the PR author\'s membership in the specified teams', done => {
|
|
||||||
bv.getPrAuthorTeamMembership(pr).then(() => {
|
|
||||||
expect(teamsIsMemberBySlugSpy).toHaveBeenCalledWith('username', ['team1', 'team2']);
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should fail if verifying membership errors', done => {
|
|
||||||
teamsIsMemberBySlugSpy.and.callFake(() => Promise.reject('Test'));
|
|
||||||
bv.getPrAuthorTeamMembership(pr).catch(err => {
|
|
||||||
expect(err).toBe('Test');
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should return the PR\'s author and whether they are members', done => {
|
|
||||||
teamsIsMemberBySlugSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
|
|
||||||
|
|
||||||
Promise.all([
|
|
||||||
bv.getPrAuthorTeamMembership(pr).then(({author, isMember}) => {
|
|
||||||
expect(author).toBe('username');
|
|
||||||
expect(isMember).toBe(true);
|
|
||||||
}),
|
|
||||||
bv.getPrAuthorTeamMembership(pr).then(({author, isMember}) => {
|
|
||||||
expect(author).toBe('username');
|
|
||||||
expect(isMember).toBe(false);
|
|
||||||
}),
|
|
||||||
]).then(done);
|
|
||||||
});
|
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
@@ -4,8 +4,8 @@ import * as http from 'http';
 import * as supertest from 'supertest';
 import {GithubPullRequests} from '../../lib/common/github-pull-requests';
 import {BuildCreator} from '../../lib/upload-server/build-creator';
-import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
-import {BuildVerifier} from '../../lib/upload-server/build-verifier';
+import {ChangedPrVisibilityEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
+import {BUILD_VERIFICATION_STATUS, BuildVerifier} from '../../lib/upload-server/build-verifier';
 import {uploadServerFactory as usf} from '../../lib/upload-server/upload-server-factory';

 // Tests
||||||
@@ -18,11 +18,12 @@ describe('uploadServerFactory', () => {
     githubToken: '12345',
     repoSlug: 'repo/slug',
     secret: 'secret',
+    trustedPrLabel: 'trusted: pr-label',
   };

   // Helpers
   const createUploadServer = (partialConfig: Partial<typeof defaultConfig> = {}) =>
-    usf.create({...defaultConfig, ...partialConfig});
+    usf.create({...defaultConfig, ...partialConfig} as typeof defaultConfig);


   describe('create()', () => {
||||||
@@ -75,6 +76,12 @@ describe('uploadServerFactory', () => {
     });


+    it('should throw if \'trustedPrLabel\' is missing or empty', () => {
+      expect(() => createUploadServer({trustedPrLabel: ''})).
+        toThrowError('Missing or empty required parameter \'trustedPrLabel\'!');
+    });
+
+
     it('should return an http.Server', () => {
       const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
       const server = createUploadServer();
||||||
@ -141,26 +148,71 @@ describe('uploadServerFactory', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should post a comment on GitHub on \'build.created\'', () => {
|
describe('on \'build.created\'', () => {
|
||||||
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
|
let prsAddCommentSpy: jasmine.Spy;
|
||||||
const commentBody = 'The angular.io preview for 1234567 is available [here][1].\n\n' +
|
|
||||||
'[1]: https://pr42-1234567890.domain.name/';
|
|
||||||
|
|
||||||
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890'});
|
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
|
||||||
|
|
||||||
|
|
||||||
|
it('should post a comment on GitHub for public previews', () => {
|
||||||
|
const commentBody = 'You can preview 1234567890 at https://pr42-1234567890.domain.name/.';
|
||||||
|
|
||||||
|
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
|
||||||
|
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not post a comment on GitHub for non-public previews', () => {
|
||||||
|
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: false});
|
||||||
|
expect(prsAddCommentSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
describe('on \'pr.changedVisibility\'', () => {
|
||||||
|
let prsAddCommentSpy: jasmine.Spy;
|
||||||
|
|
||||||
|
beforeEach(() => prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment'));
|
||||||
|
|
||||||
|
|
||||||
|
it('should post a comment on GitHub (for all SHAs) for PRs made public', () => {
|
||||||
|
const commentBody = 'You can preview 12345 at https://pr42-12345.domain.name/.\n' +
|
||||||
|
'You can preview 67890 at https://pr42-67890.domain.name/.';
|
||||||
|
|
||||||
|
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
|
||||||
|
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not post a comment on GitHub if no SHAs were affected', () => {
|
||||||
|
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: [], isPublic: true});
|
||||||
|
expect(prsAddCommentSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should not post a comment on GitHub for PRs made non-public', () => {
|
||||||
|
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: false});
|
||||||
|
expect(prsAddCommentSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
expect(prsAddCommentSpy).toHaveBeenCalledWith(42, commentBody);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should pass the correct \'githubToken\' and \'repoSlug\' to GithubPullRequests', () => {
|
it('should pass the correct \'githubToken\' and \'repoSlug\' to GithubPullRequests', () => {
|
||||||
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
|
const prsAddCommentSpy = spyOn(GithubPullRequests.prototype, 'addComment');
|
||||||
|
|
||||||
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890'});
|
buildCreator.emit(CreatedBuildEvent.type, {pr: 42, sha: '1234567890', isPublic: true});
|
||||||
const prs = prsAddCommentSpy.calls.mostRecent().object;
|
buildCreator.emit(ChangedPrVisibilityEvent.type, {pr: 42, shas: ['12345', '67890'], isPublic: true});
|
||||||
|
|
||||||
|
const allCalls = prsAddCommentSpy.calls.all();
|
||||||
|
const prs = allCalls[0].object;
|
||||||
|
|
||||||
|
expect(prsAddCommentSpy).toHaveBeenCalledTimes(2);
|
||||||
|
expect(prs).toBe(allCalls[1].object);
|
||||||
expect(prs).toEqual(jasmine.any(GithubPullRequests));
|
expect(prs).toEqual(jasmine.any(GithubPullRequests));
|
||||||
expect((prs as any).repoSlug).toBe('repo/slug');
|
expect(prs.repoSlug).toBe('repo/slug');
|
||||||
expect((prs as any).requestHeaders.Authorization).toContain('12345');
|
expect(prs.requestHeaders.Authorization).toContain('12345');
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
@@ -184,6 +236,7 @@ describe('uploadServerFactory', () => {
         defaultConfig.repoSlug,
         defaultConfig.githubOrganization,
         defaultConfig.githubTeamSlugs,
+        defaultConfig.trustedPrLabel,
       );
       buildCreator = new BuildCreator(defaultConfig.buildsDir);
       agent = supertest.agent((usf as any).createMiddleware(buildVerifier, buildCreator));
||||||
@@ -199,17 +252,18 @@ describe('uploadServerFactory', () => {
       let buildCreatorCreateSpy: jasmine.Spy;

       beforeEach(() => {
-        buildVerifierVerifySpy = spyOn(buildVerifier, 'verify').and.returnValue(Promise.resolve());
+        const verStatus = BUILD_VERIFICATION_STATUS.verifiedAndTrusted;
+        buildVerifierVerifySpy = spyOn(buildVerifier, 'verify').and.returnValue(Promise.resolve(verStatus));
         buildCreatorCreateSpy = spyOn(buildCreator, 'create').and.returnValue(Promise.resolve());
       });


-      it('should respond with 405 for non-GET requests', done => {
+      it('should respond with 404 for non-GET requests', done => {
         verifyRequests([
-          agent.put(`/create-build/${pr}/${sha}`).expect(405),
-          agent.post(`/create-build/${pr}/${sha}`).expect(405),
-          agent.patch(`/create-build/${pr}/${sha}`).expect(405),
-          agent.delete(`/create-build/${pr}/${sha}`).expect(405),
+          agent.put(`/create-build/${pr}/${sha}`).expect(404),
+          agent.post(`/create-build/${pr}/${sha}`).expect(404),
+          agent.patch(`/create-build/${pr}/${sha}`).expect(404),
+          agent.delete(`/create-build/${pr}/${sha}`).expect(404),
         ], done);
       });

||||||
@@ -284,14 +338,17 @@ describe('uploadServerFactory', () => {


       it('should call \'BuildCreator#create()\' with the correct arguments', done => {
-        const req = agent.
-          get(`/create-build/${pr}/${sha}`).
-          set('AUTHORIZATION', 'foo').
-          set('X-FILE', 'bar');
+        buildVerifierVerifySpy.and.returnValues(
+          Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedAndTrusted),
+          Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted));

-        promisifyRequest(req).
-          then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar')).
-          then(done, done.fail);
+        const req1 = agent.get(`/create-build/${pr}/${sha}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
+        const req2 = agent.get(`/create-build/${pr}/${sha}`).set('AUTHORIZATION', 'foo').set('X-FILE', 'bar');
+
+        Promise.all([
+          promisifyRequest(req1).then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar', true)),
+          promisifyRequest(req2).then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'bar', false)),
+        ]).then(done, done.fail);
       });


||||||
@@ -307,7 +364,7 @@ describe('uploadServerFactory', () => {
       });


-      it('should respond with 201 on successful upload', done => {
+      it('should respond with 201 on successful upload (for public builds)', done => {
         const req = agent.
           get(`/create-build/${pr}/${sha}`).
           set('AUTHORIZATION', 'foo').
||||||
@@ -318,6 +375,18 @@ describe('uploadServerFactory', () => {
       });


+      it('should respond with 202 on successful upload (for hidden builds)', done => {
+        buildVerifierVerifySpy.and.returnValue(Promise.resolve(BUILD_VERIFICATION_STATUS.verifiedNotTrusted));
+        const req = agent.
+          get(`/create-build/${pr}/${sha}`).
+          set('AUTHORIZATION', 'foo').
+          set('X-FILE', 'bar').
+          expect(202, http.STATUS_CODES[202]);
+
+        verifyRequests([req], done);
+      });
+
+
       it('should reject PRs with leading zeros', done => {
         verifyRequests([agent.get(`/create-build/0${pr}/${sha}`).expect(404)], done);
       });
||||||
@@ -349,12 +418,12 @@ describe('uploadServerFactory', () => {
       });


-      it('should respond with 405 for non-GET requests', done => {
+      it('should respond with 404 for non-GET requests', done => {
         verifyRequests([
-          agent.put('/health-check').expect(405),
-          agent.post('/health-check').expect(405),
-          agent.patch('/health-check').expect(405),
-          agent.delete('/health-check').expect(405),
+          agent.put('/health-check').expect(404),
+          agent.post('/health-check').expect(404),
+          agent.patch('/health-check').expect(404),
+          agent.delete('/health-check').expect(404),
         ], done);
       });

||||||
@ -373,11 +442,141 @@ describe('uploadServerFactory', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
describe('GET *', () => {
|
describe('POST /pr-updated', () => {
|
||||||
|
const pr = '9';
|
||||||
|
const url = '/pr-updated';
|
||||||
|
let bvGetPrIsTrustedSpy: jasmine.Spy;
|
||||||
|
let bcUpdatePrVisibilitySpy: jasmine.Spy;
|
||||||
|
|
||||||
|
// Helpers
|
||||||
|
const createRequest = (num: number, action?: string) =>
|
||||||
|
agent.post(url).send({number: num, action});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
bvGetPrIsTrustedSpy = spyOn(buildVerifier, 'getPrIsTrusted');
|
+      bcUpdatePrVisibilitySpy = spyOn(buildCreator, 'updatePrVisibility');
+    });
+
+
+    it('should respond with 404 for non-POST requests', done => {
+      verifyRequests([
+        agent.get(url).expect(404),
+        agent.put(url).expect(404),
+        agent.patch(url).expect(404),
+        agent.delete(url).expect(404),
+      ], done);
+    });
+
+
+    it('should respond with 400 for requests without a payload', done => {
+      const responseBody = `Missing or empty 'number' field in request: POST ${url} {}`;
+
+      const request1 = agent.post(url);
+      const request2 = agent.post(url).send();
+
+      verifyRequests([
+        request1.expect(400, responseBody),
+        request2.expect(400, responseBody),
+      ], done);
+    });
+
+
+    it('should respond with 400 for requests without a \'number\' field', done => {
+      const responseBodyPrefix = `Missing or empty 'number' field in request: POST ${url}`;
+
+      const request1 = agent.post(url).send({});
+      const request2 = agent.post(url).send({number: null});
+
+      verifyRequests([
+        request1.expect(400, `${responseBodyPrefix} {}`),
+        request2.expect(400, `${responseBodyPrefix} {"number":null}`),
+      ], done);
+    });
+
+
+    it('should call \'BuildVerifier#gtPrIsTrusted()\' with the correct arguments', done => {
+      const req = createRequest(+pr);
+
+      promisifyRequest(req).
+        then(() => expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9)).
+        then(done, done.fail);
+    });
+
+
+    it('should propagate errors from BuildVerifier', done => {
+      bvGetPrIsTrustedSpy.and.callFake(() => Promise.reject('Test'));
+
+      const req = createRequest(+pr).expect(500, 'Test');
+
+      promisifyRequest(req).
+        then(() => {
+          expect(bvGetPrIsTrustedSpy).toHaveBeenCalledWith(9);
+          expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
+        }).
+        then(done, done.fail);
+    });
+
+
+    it('should call \'BuildCreator#updatePrVisibility()\' with the correct arguments', done => {
+      bvGetPrIsTrustedSpy.and.callFake((pr2: number) => Promise.resolve(pr2 === 42));
+
+      const req1 = createRequest(24);
+      const req2 = createRequest(42);
+
+      Promise.all([
+        promisifyRequest(req1).then(() => expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith('24', false)),
+        promisifyRequest(req2).then(() => expect(bcUpdatePrVisibilitySpy).toHaveBeenCalledWith('42', true)),
+      ]).then(done, done.fail);
+    });
+
+
+    it('should propagate errors from BuildCreator', done => {
+      bcUpdatePrVisibilitySpy.and.callFake(() => Promise.reject('Test'));
+
+      const req = createRequest(+pr).expect(500, 'Test');
+      verifyRequests([req], done);
+    });
+
+
+    describe('on success', () => {
+
+      it('should respond with 200 (action: undefined)', done => {
+        bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
+
+        const reqs = [4, 2].map(num => createRequest(num).expect(200, http.STATUS_CODES[200]));
+        verifyRequests(reqs, done);
+      });
+
+
+      it('should respond with 200 (action: labeled)', done => {
+        bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
+
+        const reqs = [4, 2].map(num => createRequest(num, 'labeled').expect(200, http.STATUS_CODES[200]));
+        verifyRequests(reqs, done);
+      });
+
+
+      it('should respond with 200 (action: unlabeled)', done => {
+        bvGetPrIsTrustedSpy.and.returnValues(Promise.resolve(true), Promise.resolve(false));
+
+        const reqs = [4, 2].map(num => createRequest(num, 'unlabeled').expect(200, http.STATUS_CODES[200]));
+        verifyRequests(reqs, done);
+      });
+
+
+      it('should respond with 200 (and do nothing) if \'action\' implies no visibility change', done => {
+        const promises = ['foo', 'notlabeled'].
+          map(action => createRequest(+pr, action).expect(200, http.STATUS_CODES[200])).
+          map(promisifyRequest);
+
+        Promise.all(promises).
+          then(() => {
+            expect(bvGetPrIsTrustedSpy).not.toHaveBeenCalled();
+            expect(bcUpdatePrVisibilitySpy).not.toHaveBeenCalled();
+          }).
+          then(done, done.fail);
+      });
-    it('should respond with 404', done => {
-      const responseBody = 'Unknown resource in request: GET /some/url';
-
-      verifyRequests([agent.get('/some/url').expect(404, responseBody)], done);
-    });
   });
 });
@@ -385,14 +584,15 @@ describe('uploadServerFactory', () => {

   describe('ALL *', () => {

-    it('should respond with 405', done => {
-      const responseFor = (method: string) => `Unsupported method in request: ${method.toUpperCase()} /some/url`;
+    it('should respond with 404', done => {
+      const responseFor = (method: string) => `Unknown resource in request: ${method.toUpperCase()} /some/url`;

       verifyRequests([
+        agent.get('/some/url').expect(404, responseFor('get')),
-        agent.put('/some/url').expect(405, responseFor('put')),
-        agent.post('/some/url').expect(405, responseFor('post')),
-        agent.patch('/some/url').expect(405, responseFor('patch')),
-        agent.delete('/some/url').expect(405, responseFor('delete')),
+        agent.put('/some/url').expect(404, responseFor('put')),
+        agent.post('/some/url').expect(404, responseFor('post')),
+        agent.patch('/some/url').expect(404, responseFor('patch')),
+        agent.delete('/some/url').expect(404, responseFor('delete')),
       ], done);
     });
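The specs above pin down the externally visible behaviour of the `/pr-updated` endpoint: non-POST requests get a 404, a payload without a `number` field gets a 400, and label-related actions trigger the trust re-check. As a quick manual illustration of the same contract (not part of this commit; the hostname and port below are placeholders for wherever the upload-server is listening):

```
# Non-POST requests are rejected outright.
curl -i -X GET "http://<upload-hostname>:<upload-port>/pr-updated"    # expect: 404

# A POST without a 'number' field is rejected.
curl -i -X POST -H "Content-Type: application/json" -d '{}' \
     "http://<upload-hostname>:<upload-port>/pr-updated"              # expect: 400

# A POST with a PR number (and an optional 'action') is processed.
curl -i -X POST -H "Content-Type: application/json" \
     -d '{"number":<PR>,"action":"labeled"}' \
     "http://<upload-hostname>:<upload-port>/pr-updated"              # expect: 200
```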
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 #!/bin/bash
-set -e -o pipefail
+set -eu -o pipefail

 # Set up env variables
 export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null)

@@ -1,5 +1,6 @@
 #!/bin/bash
-set +e -o pipefail
+# Using `+e` so that all checks are run and we get a complete report (even if some checks failed).
+set +e -u -o pipefail

 # Variables

@@ -1,5 +1,5 @@
 #!/bin/bash
-set -e -o pipefail
+set -eu -o pipefail

 exec >> /var/log/aio/init.log
 exec 2>&1

@@ -1,9 +1,9 @@
 #!/bin/bash
-set -e -o pipefail
+set -eu -o pipefail

 # Set up env variables for production
-export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null)
+export AIO_GITHUB_TOKEN=$(head -c -1 /aio-secrets/GITHUB_TOKEN 2>/dev/null || echo "MISSING_GITHUB_TOKEN")
-export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null)
+export AIO_PREVIEW_DEPLOYMENT_TOKEN=$(head -c -1 /aio-secrets/PREVIEW_DEPLOYMENT_TOKEN 2>/dev/null || echo "MISSING_PREVIEW_DEPLOYMENT_TOKEN")

 # Start the upload-server instance
 # TODO(gkalpak): Ideally, the upload server should be run as a non-privileged user.

@@ -1,13 +1,13 @@
 #!/bin/bash
-set -e -o pipefail
+set -eu -o pipefail

 # Set up env variables for testing
 export AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR
 export AIO_DOMAIN_NAME=$TEST_AIO_DOMAIN_NAME
 export AIO_GITHUB_ORGANIZATION=$TEST_AIO_GITHUB_ORGANIZATION
 export AIO_GITHUB_TEAM_SLUGS=$TEST_AIO_GITHUB_TEAM_SLUGS
-export AIO_PREVIEW_DEPLOYMENT_TOKEN=$TEST_AIO_PREVIEW_DEPLOYMENT_TOKEN
 export AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG
+export AIO_TRUSTED_PR_LABEL=$TEST_AIO_TRUSTED_PR_LABEL
 export AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME
 export AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT

@@ -21,7 +21,7 @@ appName=aio-upload-server-test
 if [[ "$1" == "stop" ]]; then
   pm2 delete $appName
 else
-  pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server/index-test.js \
+  pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup/start-test-upload-server.js \
     --log /var/log/aio/upload-server-test.log \
     --name $appName \
     --no-autorestart \
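A recurring change across these scripts is switching from `set -e` to `set -eu`. As a minimal illustration (not taken from the repository) of what the added `-u` flag buys:

```
#!/bin/bash
set -eu -o pipefail

# With `-u`, referencing an unset variable is a hard error, so a missing secret or a
# forgotten TEST_* variable aborts the script instead of silently expanding to "".
echo "Deploying for repo: $AIO_REPO_SLUG"
```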
@@ -4,7 +4,8 @@
 ## Overview
 - [General overview](overview--general.md)
 - [Security model](overview--security-model.md)
-- [Available Commands](overview--scripts-and-commands.md)
+- [Available scripts and commands](overview--scripts-and-commands.md)
+- [HTTP status codes](overview--http-status-codes.md)

 ## Setting up the VM

@@ -20,7 +21,11 @@
 ## Starting the docker container
-- [Create docker image](vm-setup--start-docker-container.md)
+- [Start docker container](vm-setup--start-docker-container.md)

+## Updating the docker container
+- [Update docker container](vm-setup--update-docker-container.md)

 ## Miscellaneous
@@ -17,7 +17,7 @@ you don't need to specify values for those.
   The domain name of the server.

 - `AIO_GITHUB_ORGANIZATION`:
-  The GitHub organization whose teams arew whitelisted for accepting uploads.
+  The GitHub organization whose teams are whitelisted for accepting uploads.
   See also `AIO_GITHUB_TEAM_SLUGS`.

 - `AIO_GITHUB_TEAM_SLUGS`:

@@ -39,6 +39,11 @@ you don't need to specify values for those.
 - `AIO_REPO_SLUG`:
   The repository slug (in the form `<user>/<repo>`) for which PRs will be uploaded.

+- `AIO_TRUSTED_PR_LABEL`:
+  The PR whose presence indicates the PR has been manually verified and is allowed to have its
+  build artifacts publicly served. This is useful for enabling previews for any PR (not only those
+  from trusted authors).

 - `AIO_UPLOAD_HOSTNAME`:
   The internal hostname for accessing the Node.js upload-server. This is used by nginx for
   delegating upload requests and also for performing a periodic health-check.
@@ -3,10 +3,9 @@
 TODO (gkalpak): Add docs. Mention:
 - Travis' JWT addon (+ limitations).
-  Relevant files: `.travis.yml`
+  Relevant files: `.travis.yml`, `scripts/ci/env.sh`
 - Testing on CI.
-  Relevant files: `ci/test-aio.sh`, `aio/aio-builds-setup/scripts/test.sh`
+  Relevant files: `scripts/ci/test-aio.sh`, `aio/aio-builds-setup/scripts/test.sh`
-- Preverifying on CI.
-  Relevant files: `ci/deploy.sh`, `aio/aio-builds-setup/scripts/travis-preverify-pr.sh`
 - Deploying from CI.
-  Relevant files: `ci/deploy.sh`, `aio/scripts/deploy-preview.sh`
+  Relevant files: `scripts/ci/deploy.sh`, `aio/scripts/deploy-preview.sh`,
+  `aio/scripts/deploy-to-firebase.sh`
@@ -33,36 +33,69 @@ container:

 ### On CI (Travis)
-- Build job completes successfully (i.e. build succeeds and tests pass).
+- Build job completes successfully.
 - The CI script checks whether the build job was initiated by a PR against the angular/angular
   master branch.
-- The CI script checks whether the PR has touched any files inside the angular.io project directory
-  (currently `aio/`).
+- The CI script checks whether the PR has touched any files that might affect the angular.io app
+  (currently the `aio/` or `packages/` directories, ignoring spec files).
-- The CI script checks whether the author of the PR is a member of one of the whitelisted GitHub
-  teams (and therefore allowed to upload).
+- Optionally, the CI script can check whether the PR can be automatically verified (i.e. if the
+  author of the PR is a member of one of the whitelisted GitHub teams or the PR has the specified
+  "trusted PR" label).
   **Note:**
   For security reasons, the same checks will be performed on the server as well. This is an optional
-  step with the purpose of:
-  1. Avoiding the wasted overhead associated with uploads that are going to be rejected (e.g.
-     building the artifacts, sending them to the server, running checks on the server, etc).
-  2. Avoiding failing the build (due to an error response from the server) or requiring additional
-     logic for detecting the reasons of the failure.
-- The CI script gzip and upload the build artifacts to the server.
+  step that can be used in case one wants to apply special logic depending on the outcome of the
+  pre-verification. For example:
+  1. One might want to deploy automatically verified PRs only. In that case, the pre-verification
+     helps avoid the wasted overhead associated with uploads that are going to be rejected (e.g.
+     building the artifacts, sending them to the server, running checks on the server, detecting the
+     reasons of deployment failure and whether to fail the build, etc).
+  2. One might want to apply additional logic (e.g. different tests) depending on whether the PR is
+     automatically verified or not).
+- The CI script gzips and uploads the build artifacts to the server.

 More info on how to set things up on CI can be found [here](misc--integrate-with-ci.md).


 ### Uploading build artifacts
-- nginx receives upload request.
+- nginx receives the upload request.
 - nginx checks that the uploaded gzip archive does not exceed the specified max file size, stores it
   in a temporary location and passes the filepath to the Node.js upload-server.
-- The upload-server verifies that the uploaded file is not trying to overwrite an existing build,
-  and runs several checks to determine whether the request should be accepted (more details can be
-  found [here](overview--security-model.md)).
+- The upload-server runs several checks to determine whether the request should be accepted and
+  whether it should be publicly accessible or stored for later verification (more details can be
+  found [here](overview--security-model.md)).
-- The upload-server deploys the artifacts to a sub-directory named after the PR number and SHA:
-  `<PR>/<SHA>/`
-- The upload-server posts a comment on the corresponding PR on GitHub mentioning the SHA and the
-  the link where the preview can be found.
+- The upload-server changes the "visibility" of the associated PR, if necessary. For example, if
+  builds for the same PR had been previously deployed as non-public and the current build has been
+  automatically verified, all previous builds are made public as well.
+  If the PR transitions from "non-public" to "public", the upload-server posts a comment on the
+  corresponding PR on GitHub mentioning the SHAs and the links where the previews can be found.
+- The upload-server verifies that the uploaded file is not trying to overwrite an existing build.
+- The upload-server deploys the artifacts to a sub-directory named after the PR number and the first
+  few characters of the SHA: `<PR>/<SHA>/`
+  (Non-publicly accessible PRs will be stored in a different location, but again derived from the PR
+  number and SHA.)
+- If the PR is publicly accessible, the upload-server posts a comment on the corresponding PR on
+  GitHub mentioning the SHA and the link where the preview can be found.

+More info on the possible HTTP status codes and their meaning can be found
+[here](overview--http-status-codes.md).


+### Updating PR visibility
+- nginx receives a notification that a PR has been updated and passes it through to the
+  upload-server. This could, for example, be sent by a GitHub webhook every time a PR's labels
+  change.
+  E.g.: `ngbuilds.io/pr-updated` (payload: `{"number":<PR>,"action":"labeled"}`)
+- The request contains the PR number (as `number`) and optionally the action that triggered the
+  request (as `action`) in the payload.
+- The upload-server verifies the payload and determines whether the `action` (if specified) could
+  have led to PR visibility changes. Only requests that omit the `action` field altogether or
+  specify an action that can affect visibility are further processed.
+  (Currently, the only actions that are considered capable of affecting visibility are `labeled` and
+  `unlabeled`.)
+- The upload-server re-checks and if necessary updates the PR's visibility.

+More info on the possible HTTP status codes and their meaning can be found
+[here](overview--http-status-codes.md).
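For instance, a label-change notification of the shape described above could be delivered with a single POST (a sketch only; how the webhook is actually configured is outside the scope of this commit):

```
curl -iL -X POST "https://ngbuilds.io/pr-updated" \
     -H "Content-Type: application/json" \
     -d '{"number":<PR>,"action":"labeled"}'
```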
 ### Serving build artifacts

@@ -71,6 +104,9 @@ More info on how to set things up on CI can be found [here](misc--integrate-with
 - nginx maps the subdomain to the correct sub-directory and serves the resource.
   E.g.: `/<PR>/<SHA>/path/to/resource`

+More info on the possible HTTP status codes and their meaning can be found
+[here](overview--http-status-codes.md).


 ### Removing obsolete artifacts
 In order to avoid flooding the disk with unnecessary build artifacts, there is a cronjob that runs a
84  aio/aio-builds-setup/docs/overview--http-status-codes.md  Normal file
@@ -0,0 +1,84 @@
# Overview - HTTP Status Codes

This is a list of all the possible HTTP status codes returned by the nginx and upload servers, along
with a brief explanation of what they mean:


## `http://*.ngbuilds.io/*`

- **307 (Temporary Redirect)**:
  All non-HTTPS requests. 308 (Permanent Redirect) would be more appropriate, but is not supported
  by all agents (e.g. cURL).


## `https://pr<pr>-<sha>.ngbuilds.io/*`

- **200 (OK)**:
  File was found or URL was rewritten to `/index.html` (i.e. all paths that have no `.` in final
  segment).

- **403 (Forbidden)**:
  Trying to access a sub-directory.

- **404 (Not Found)**:
  File not found.


## `https://ngbuilds.io/create-build/<pr>/<sha>`

- **201 (Created)**:
  Build deployed successfully and is publicly available.

- **202 (Accepted)**:
  Build not automatically verifiable. Stored for later deployment (after re-verification).

- **400 (Bad Request)**:
  No payload.

- **401 (Unauthorized)**:
  No `AUTHORIZATION` header.

- **403 (Forbidden)**:
  Unable to verify build (e.g. invalid JWT token, or unable to talk to 3rd-party APIs, etc).

- **405 (Method Not Allowed)**:
  Request method other than POST.

- **409 (Conflict)**:
  Request to overwrite existing directory (e.g. deploy existing build or change PR visibility when
  the destination directory does already exist).

- **413 (Payload Too Large)**:
  Payload larger than size specified in `AIO_UPLOAD_MAX_SIZE`.


## `https://ngbuilds.io/health-check`

- **200 (OK)**:
  The server is healthy (i.e. up and running and processing requests).


## `https://ngbuilds.io/pr-updated`

- **200 (OK)**:
  Request processed successfully. Processing may or may not have resulted in further actions.

- **400 (Bad Request)**:
  No payload or no `number` field in payload.

- **405 (Method Not Allowed)**:
  Request method other than POST.

- **409 (Conflict)**:
  Request to overwrite existing directory (i.e. directories for both visibilities exist).
  (Normally, this should not happen.)


## `https://*.ngbuilds.io/*`

- **404 (Not Found)**:
  Request not matched by the above rules.

- **500 (Internal Server Error)**:
  Error while processing a request matched by the above rules.
@@ -8,19 +8,17 @@ This is an overview of the available scripts and commands.
 The scripts are located inside `<aio-builds-setup-dir>/scripts/`. The following scripts are
 available:

-- `build.sh`:
+- `create-image.sh`:
   Can be used for creating a preconfigured docker image.
   See [here](vm-setup--create-docker-image.md) for more info.

-- `test.sh`
+- `test.sh`:
   Can be used for running the tests for `<aio-builds-setup-dir>/dockerbuild/scripts-js/`. This is
   useful for CI integration. See [here](misc--integrate-with-ci.md) for more info.

-- `travis-preverify-pr.sh`
-  Can be used for "preverifying" a PR before uploading the artifacts to the server. It checks that
-  the author of the PR a member of one of the specified GitHub teams and therefore allowed to upload
-  build artifacts. This is useful for CI integration. See [here](misc--integrate-with-ci.md) for
-  more info.
+- `update-preview-server.sh`:
+  Can be used for updating the docker container (and image) based on the latest changes checked out
+  from a git repository. See [here](vm-setup--update-docker-container.md) for more info.


 ## Commands
@@ -41,12 +41,13 @@ part of the CI setup and serving them publicly.
 The implemented approach can be broken up to the following sub-tasks:

 1. Verify which PR the uploaded artifacts correspond to.
-2. Determine the author of the PR.
-3. Check whether the PR author is a member of some whitelisted GitHub team.
-4. Deploy the artifacts to the corresponding PR's directory.
-5. Prevent overwriting previously deployed artifacts (which ensures that the guarantees established
+2. Fetch the PR's metadata, including author and labels.
+3. Check whether the PR can be automatically verified as "trusted" (based on its author or labels).
+4. If necessary, update the corresponding PR's verification status.
+5. Deploy the artifacts to the corresponding PR's directory.
+6. Prevent overwriting previously deployed artifacts (which ensures that the guarantees established
    during deployment will remain valid until the artifacts are removed).
-6. Prevent uploaded files from accessing anything outside their directory.
+7. Prevent uploaded files from accessing anything outside their directory.


 ### Implementation details
|
|||||||
_There are currently certain limitation in the implementation of the JWT addon._
|
_There are currently certain limitation in the implementation of the JWT addon._
|
||||||
_See the next section for more details._
|
_See the next section for more details._
|
||||||
|
|
||||||
2. **Determine the author of the PR.**
|
2. **Fetch the PR's metadata, including author and labels**.
|
||||||
|
|
||||||
Once we have securely associated the uploaded artifaacts to a PR, we retrieve the PR's metadata -
|
Once we have securely associated the uploaded artifacts to a PR, we retrieve the PR's metadata -
|
||||||
including the author's username - using the [GitHub API](https://developer.github.com/v3/).
|
including the author's username and the labels - using the
|
||||||
|
[GitHub API](https://developer.github.com/v3/).
|
||||||
To avoid rate-limit restrictions, we use a Personal Access Token (issued by
|
To avoid rate-limit restrictions, we use a Personal Access Token (issued by
|
||||||
[@mary-poppins](https://github.com/mary-poppins)).
|
[@mary-poppins](https://github.com/mary-poppins)).
|
||||||
|
|
||||||
3. **Check whether the PR author is a member of some whitelisted GitHub team.**
|
3. **Check whether the PR can be automatically verified as "trusted"**.
|
||||||
|
|
||||||
Again using the GitHub API, we can verify the author's membership in one of the
|
"Trusted" means that we are confident that the build artifacts are suitable for being deployed
|
||||||
whitelisted/trusted GitHub teams. For this operation, we need a PErsonal Access Token with the
|
and publicly accessible on the preview server. There are two ways to check that:
|
||||||
`read:org` scope issued by a user that can "see" the specified GitHub organization.
|
1. We can verify that the PR has a pre-determined label, which marks it as "safe for preview".
|
||||||
Here too, we use token by @mary-poppins.
|
Such a label can only have been added by a maintainer (with the necessary rights) and
|
||||||
|
designates that they have manually verified the PR contents.
|
||||||
|
2. We can verify (again using the GitHub API) the author's membership in one of the
|
||||||
|
whitelisted/trusted GitHub teams. For this operation, we need a Personal Access Token with the
|
||||||
|
`read:org` scope issued by a user that can "see" the specified GitHub organization.
|
||||||
|
Here too, we use the token by @mary-poppins.
|
||||||
|
|
||||||
4. **Deploy the artifacts to the corresponding PR's directory.**
|
4. **If necessary update the corresponding PR's verification status**.
|
||||||
|
|
||||||
With the preceeding steps, we have verified that the uploaded artifacts have been uploaded by
|
Once we have determined whether the PR is considered "trusted", we update its "visibility" (i.e.
|
||||||
Travis and correspond to a PR whose author is a member of a trusted team. Essentially, as long as
|
whether it is publicly accessible or not), based on the new verification status. For example, if
|
||||||
sub-tasks 1, 2 and 3 can be securely accomplished, it is possible to "project" the trust we have
|
a PR was initially considered "not trusted" but the check triggered by a new build determined
|
||||||
in a team's members through the PR and Travis to the build artifacts.
|
otherwise, the PR (and all the previously uploaded previews) are made public. It works the same
|
||||||
|
way if a PR has gone from "trusted" to "not trusted".
|
||||||
|
|
||||||
5. **Prevent overwriting previously deployed artifacts**.
|
5. **Deploy the artifacts to the corresponding PR's directory.**
|
||||||
|
|
||||||
In order to enforce this restriction (and ensure that the deployed artifacts validity is
|
With the preceding steps, we have verified that the uploaded artifacts have been uploaded by
|
||||||
|
Travis. Additionally, we have determined whether the PR can be trusted to have its previews
|
||||||
|
publicly accessible or whether further verification is necessary. The artifacts will be stored to
|
||||||
|
the PR's directory, but will not be publicly accessible unless the PR has been verified.
|
||||||
|
Essentially, as long as sub-tasks 1, 2 and 3 can be securely accomplished, it is possible to
|
||||||
|
"project" the trust we have in a team's members through the PR and Travis to the build artifacts.
|
||||||
|
|
||||||
|
6. **Prevent overwriting previously deployed artifacts**.
|
||||||
|
|
||||||
|
In order to enforce this restriction (and ensure that the deployed artifacts' validity is
|
||||||
preserved throughout their "lifetime"), the server that handles the upload (currently a Node.js
|
preserved throughout their "lifetime"), the server that handles the upload (currently a Node.js
|
||||||
Express server) rejects uploads that target an existing directory.
|
Express server) rejects uploads that target an existing directory.
|
||||||
_Note: A PR can contain multiple uploads; one for each SHA that was built on Travis._
|
_Note: A PR can contain multiple uploads; one for each SHA that was built on Travis._
|
||||||
|
|
||||||
6. **Prevent uploaded files from accessing anything outside their directory.**
|
7. **Prevent uploaded files from accessing anything outside their directory.**
|
||||||
|
|
||||||
Nginx (which is used to serve the uploaded artifacts) has been configured to not follow symlinks
|
Nginx (which is used to serve the uploaded artifacts) has been configured to not follow symlinks
|
||||||
outside of the directory where the build artifacts are stored.
|
outside of the directory where the build artifacts are stored.
|
||||||
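The team-membership variant of the trust check in sub-task 3 above boils down to a single GitHub API call. A rough sketch (this is not the scripts-js implementation; the team id and author are placeholders, and the token is the `read:org`-scoped Personal Access Token mentioned above):

```
# 200 => the author is a member of the team (PR can be auto-verified),
# 404 => not a member (fall back to the "trusted PR" label or manual verification).
curl -s -o /dev/null -w "%{http_code}\n" \
     -H "Authorization: token $AIO_GITHUB_TOKEN" \
     "https://api.github.com/teams/<team-id>/memberships/<pr-author>"
```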
@@ -104,6 +121,11 @@ This section describes how each of the aforementioned sub-tasks is accomplished:
 - Each trusted PR author has full control over the content that is uploaded for their PRs. Part of
   the security model relies on the trustworthiness of these authors.

+- Adding the specified label on a PR and marking it as trusted, gives the author full control over
+  the content that is uploaded for the specific PR (e.g. by pushing more commits to it). The user
+  adding the label is responsible for ensuring that this control is not abused and that the PR is
+  either closed (one way or another) or the access is revoked.

 - If anyone gets access to the `PREVIEW_DEPLOYMENT_TOKEN` (a.k.a. `NGBUILDS_IO_KEY` on
   angular/angular) variable generated for each Travis job, they will be able to impersonate the
   corresponding PR's author on the preview server for as long as the token is valid (currently 90
@@ -6,11 +6,11 @@

 ## Build docker image
-- `<aio-builds-setup-dir>/scripts/build.sh [<name>[:<tag>] [--build-arg <NAME>=<value> ...]]`
+- `<aio-builds-setup-dir>/scripts/create-image.sh [<name>[:<tag>] [--build-arg <NAME>=<value> ...]]`
 - You can overwrite the default environment variables inside the image, by passing new values using
   `--build-arg`.

-**Note:** The build script has to execute docker commands with `sudo`.
+**Note:** The script has to execute docker commands with `sudo`.


 ## Example

@@ -25,7 +25,7 @@ The following commands would create a docker image from GitHub repo `foo/bar` to
   --build-arg AIO_REPO_SLUG=foo/bar \
   --build-arg AIO_DOMAIN_NAME=foobar-builds.io \
   --build-arg AIO_GITHUB_ORGANIZATION=foo \
-  --build-arg AIO_GITHUB_TEMA_SLUGS=bar-core,bar-docs-authors
+  --build-arg AIO_GITHUB_TEAM_SLUGS=bar-core,bar-docs-authors
 ```

 A full list of the available environment variables can be found

@@ -41,9 +41,10 @@ certificate covering both the domain and subdomains.
 ## Create directory for logs (Optional)
 Optionally, a logs directory can pe passed to the docker container for storing non-system-related
 logs. If not provided, the logs are kept locally on the container and will be lost whenever the
-container is replaced (e.g. when updating to use a newer version of the docker image).
+container is replaced (e.g. when updating to use a newer version of the docker image). Log files are
+rotated and retained for 6 months.

-The following files log files are kept in this directory:
+The following log files are kept in this directory:

 - `clean-up.log`:
   Output of the `aio-clean-up` command, run as a cronjob for cleaning up the build artifacts of

@@ -7,16 +7,16 @@ command:

 ```
 sudo docker run \
-  -d \
+  --detach \
   --dns 127.0.0.1 \
   --name <instance-name> \
-  -p 80:80 \
-  -p 443:443 \
+  --publish 80:80 \
+  --publish 443:443 \
   --restart unless-stopped \
-  [-v <host-cert-dir>:/etc/ssl/localcerts:ro] \
-  -v <host-secrets-dir>:/aio-secrets:ro \
-  -v <host-builds-dir>:/var/www/aio-builds \
-  [-v <host-logs-dir>:/var/log/aio] \
+  [--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \
+  --volume <host-secrets-dir>:/aio-secrets:ro \
+  --volume <host-builds-dir>:/var/www/aio-builds \
+  [--volume <host-logs-dir>:/var/log/aio] \
   <name>[:<tag>]
 ```

@@ -27,7 +27,7 @@ can be found [here](https://docs.docker.com/engine/reference/run/).
 sudo docker run \

   # Start as a daemon.
-  -d \
+  --detach \

   # Use the local DNS server.
   # (This is necessary for mapping internal URLs, e.g. for the Node.js upload-server.)

@@ -37,9 +37,9 @@ sudo docker run \
   # Useful for running `docker` commands, e.g.: `docker stop <instance-name>`
   --name <instance-name> \

-  # Map ports of the hosr VM (left) to ports of the docker container (right)
-  -p 80:80 \
-  -p 443:443 \
+  # Map ports of the host VM (left) to ports of the docker container (right)
+  --publish 80:80 \
+  --publish 443:443 \

   # Automatically restart the container (unless it was explicitly stopped by the user).
   # (This ensures that the container will be automatically started on boot.)

@@ -48,22 +48,22 @@ sudo docker run \
   # The directory the contains the SSL certificates.
   # (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
   # If not provided, the container will use self-signed certificates.
-  [-v <host-cert-dir>:/etc/ssl/localcerts:ro] \
+  [--volume <host-cert-dir>:/etc/ssl/localcerts:ro] \

   # The directory the contains the secrets (e.g. GitHub token, JWT secret, etc).
   # (See [here](vm-setup--set-up-secrets.md) for more info.)
-  -v <host-secrets-dir>:/aio-secrets:ro \
+  --volume <host-secrets-dir>:/aio-secrets:ro \

   # The uploaded build artifacts will stored to and served from this directory.
   # (If you are using a persistent disk - as described [here](vm-setup--attach-persistent-disk.md) -
   # this will be a directory inside the disk.)
-  -v <host-builds-dir>:/var/www/aio-builds \
+  --volume <host-builds-dir>:/var/www/aio-builds \

   # The directory where the logs are being kept.
   # (See [here](vm-setup--create-host-dirs-and-files.md) for more info.)
   # If not provided, the logs will be kept inside the container, which means they will be lost
   # whenever a new container is created.
-  [-v <host-logs-dir>:/var/log/aio] \
+  [--volume <host-logs-dir>:/var/log/aio] \

   # The name of the docker image to use (and an optional tag; defaults to `latest`).
   # (See [here](vm-setup--create-docker-image.md) for instructions on how to create the iamge.)

@@ -78,15 +78,15 @@ by the container for accesing secrets and SSL certificates and keeping the build
 ```
 sudo docker run \
-  -d \
+  --detach \
   --dns 127.0.0.1 \
   --name foobar-builds-1 \
-  -p 80:80 \
-  -p 443:443 \
+  --publish 80:80 \
+  --publish 443:443 \
   --restart unless-stopped \
-  -v /etc/ssl/localcerts:/etc/ssl/localcerts:ro \
-  -v /foobar-secrets:/aio-secrets:ro \
-  -v /mnt/disks/foobar-builds:/var/www/aio-builds \
-  -v /foobar-logs:/var/log/aio \
+  --volume /etc/ssl/localcerts:/etc/ssl/localcerts:ro \
+  --volume /foobar-secrets:/aio-secrets:ro \
+  --volume /mnt/disks/foobar-builds:/var/www/aio-builds \
+  --volume /foobar-logs:/var/log/aio \
   foobar-builds
 ```
@@ -0,0 +1,52 @@
# VM setup - Update docker container


## Overview
Assuming you have cloned the repository containing the preview server code (as described
[here](vm-setup--create-docker-image.md)), you can use the `update-preview-server.sh` script on the
VM host to update the preview server based on changes in the source code.

The script will pull the latest changes from the origin's master branch and examine if there have
been any changes in files inside the preview server source code directory (see below). If there are,
it will create a new image and verify that it works as expected. Finally, it will stop and remove
the old docker container and image, create a new container based on the new image and start it.

The script assumes that the preview server source code is in the repository's
`aio/aio-builds-setup/` directory and expects the following inputs:

- **$1**: `HOST_REPO_DIR`
- **$2**: `HOST_LOCALCERTS_DIR`
- **$3**: `HOST_SECRETS_DIR`
- **$4**: `HOST_BUILDS_DIR`
- **$5**: `HOST_LOGS_DIR`

See [here](vm-setup--create-host-dirs-and-files.md) for more info on what each input directory is
used for.

**Note 1:** The script has to execute docker commands with `sudo`.

**Note 2:** Make sure the user that executes the script has access to update the repository.


## Run the script manually
You may choose to manually run the script, when necessary. Example:

```
update-preview-server.sh \
    /path/to/repo \
    /path/to/localcerts \
    /path/to/secrets \
    /path/to/builds \
    /path/to/logs
```


## Run the script automatically
You may choose to automatically trigger the script, e.g. using a cronjob. For example, the following
cronjob entry would run the script every 30 minutes and update the preview server (assuming the user
has the necessary permissions):

```
# Periodically check for changes and update the preview server (if necessary)
*/30 * * * * /path/to/update-preview-server.sh /path/to/repo /path/to/localcerts /path/to/secrets /path/to/builds /path/to/logs
```
@@ -2,14 +2,16 @@
 set -eux -o pipefail

 # Set up env
-source "`dirname $0`/env.sh"
+source "`dirname $0`/_env.sh"
 readonly defaultImageNameAndTag="aio-builds:latest"

 # Build `scripts-js/`
-cd "$SCRIPTS_JS_DIR"
-yarn install
-yarn run build
-cd -
+# (Necessary, because only `scripts-js/dist/` is copied to the docker image.)
+(
+  cd "$SCRIPTS_JS_DIR"
+  yarn install
+  yarn build
+)

 # Create docker image
 readonly nameAndOptionalTag=${1:-$defaultImageNameAndTag}

@@ -2,10 +2,11 @@
 set -eux -o pipefail

 # Set up env
-source "`dirname $0`/env.sh"
+source "`dirname $0`/_env.sh"

 # Test `scripts-js/`
-cd "$SCRIPTS_JS_DIR"
-yarn install
-yarn test
-cd -
+(
+  cd "$SCRIPTS_JS_DIR"
+  yarn install
+  yarn test
+)

@@ -1,13 +0,0 @@
-#!/bin/bash
-set -eux -o pipefail
-
-# Set up env
-source "`dirname $0`/env.sh"
-
-# Preverify PR
-AIO_GITHUB_ORGANIZATION="angular" \
-AIO_GITHUB_TEAM_SLUGS="angular-core,aio-contributors" \
-AIO_GITHUB_TOKEN=$(echo ${GITHUB_TEAM_MEMBERSHIP_CHECK_KEY} | rev) \
-AIO_REPO_SLUG=$TRAVIS_REPO_SLUG \
-AIO_PREVERIFY_PR=$TRAVIS_PULL_REQUEST \
-node "$SCRIPTS_JS_DIR/dist/lib/upload-server/index-preverify-pr"
70  aio/aio-builds-setup/scripts/update-preview-server.sh  Executable file
@@ -0,0 +1,70 @@
#!/usr/bin/env bash

set -eux -o pipefail
exec 3>&1

echo "[`date`] - Updating the preview server..."

# Input
readonly HOST_REPO_DIR=$1
readonly HOST_LOCALCERTS_DIR=$2
readonly HOST_SECRETS_DIR=$3
readonly HOST_BUILDS_DIR=$4
readonly HOST_LOGS_DIR=$5

# Constants
readonly PROVISIONAL_IMAGE_NAME=aio-builds:provisional
readonly LATEST_IMAGE_NAME=aio-builds:latest
readonly CONTAINER_NAME=aio

# Run
(
  cd "$HOST_REPO_DIR"

  readonly lastDeployedCommit=$(git rev-parse HEAD)
  echo "Currently at commit $lastDeployedCommit."

  # Pull latest master from origin.
  git pull origin master

  # Do not update the server unless files inside `aio-builds-setup/` have changed
  # or the last attempt failed (identified by the provisional image still being around).
  readonly relevantChangedFilesCount=$(git diff --name-only $lastDeployedCommit...HEAD | grep -P "^aio/aio-builds-setup/" | wc -l)
  readonly lastAttemptFailed=$(sudo docker rmi "$PROVISIONAL_IMAGE_NAME" >> /dev/fd/3 && echo "true" || echo "false")
  if [[ $relevantChangedFilesCount -eq 0 ]] && [[ "$lastAttemptFailed" != "true" ]]; then
    echo "Skipping update because no relevant files have been touched."
    exit 0
  fi

  # Create and verify a new docker image.
  aio/aio-builds-setup/scripts/create-image.sh "$PROVISIONAL_IMAGE_NAME"
  readonly imageVerified=$(sudo docker run --dns 127.0.0.1 --rm --volume $HOST_SECRETS_DIR:/aio-secrets:ro "$PROVISIONAL_IMAGE_NAME" /bin/bash -c "aio-init && aio-health-check && aio-verify-setup" >> /dev/fd/3 && echo "true" || echo "false")

  if [[ "$imageVerified" != "true" ]]; then
    echo "Failed to verify new docker image. Aborting update!"
    exit 1
  fi

  # Remove the old container and replace the docker image.
  sudo docker stop "$CONTAINER_NAME" || true
  sudo docker rm "$CONTAINER_NAME" || true
  sudo docker rmi "$LATEST_IMAGE_NAME" || true
  sudo docker tag "$PROVISIONAL_IMAGE_NAME" "$LATEST_IMAGE_NAME"
  sudo docker rmi "$PROVISIONAL_IMAGE_NAME"

  # Create and start a docker container based on the new image.
  sudo docker run \
    --detach \
    --dns 127.0.0.1 \
    --name "$CONTAINER_NAME" \
    --publish 80:80 \
    --publish 443:443 \
    --restart unless-stopped \
    --volume $HOST_LOCALCERTS_DIR:/etc/ssl/localcerts:ro \
    --volume $HOST_SECRETS_DIR:/aio-secrets:ro \
    --volume $HOST_BUILDS_DIR:/var/www/aio-builds \
    --volume $HOST_LOGS_DIR:/var/log/aio \
    "$LATEST_IMAGE_NAME"

  echo "The new docker image has been successfully deployed."
)
23  aio/content/examples/.gitignore  vendored
@@ -36,34 +36,35 @@ dist/
 *.3.js.map
 !systemjs.config.*.js
 !karma-test-shim.*.js
+!copy-dist-files.js

 # AngularJS files
 !**/*.ajs.js
+**/app/**/*.ajs.js

 # aot
-**/*.ngfactory.ts
-**/*.ngsummary.json
-**/*.shim.ngstyle.ts
-**/*.metadata.json
-!aot/bs-config.json
-!aot/index.html
-!copy-dist-files.js
+*/aot/**/*
+!*/aot/bs-config.json
+!*/aot/index.html
 !rollup-config.js

+# i18n
+!i18n/src/systemjs-text-plugin.js

 # testing
 !testing/src/browser-test-shim.js
 !testing/karma*.js

 # TS to JS
-!cb-ts-to-js/js*/**/*.js
-cb-ts-to-js/js*/**/system*.js
+!ts-to-js/js*/**/*.js
+ts-to-js/js*/**/system*.js

 # webpack
 !webpack/**/config/*.js
 !webpack/**/*webpack*.js

-# style-guide
-!style-guide/src/systemjs.custom.js
+# styleguide
+!styleguide/src/systemjs.custom.js

 # plunkers
 *plnkr.no-link.html
@@ -14,13 +14,13 @@
 <h1>Example Snippets</h1>

 <!-- #docregion ngClass -->
-<div [ngClass]="{active: isActive}">
+<div [ngClass]="{'active': isActive}">
 <!-- #enddocregion ngClass -->
   [ngClass] active
 </div>
 <!-- #docregion ngClass -->
-<div [ngClass]="{active: isActive,
-                 shazam: isImportant}">
+<div [ngClass]="{'active': isActive,
+                 'shazam': isImportant}">
 <!-- #enddocregion ngClass -->
   [ngClass] active and boldly important
 </div>

@@ -57,7 +57,7 @@
 <p></p>
 <!-- #docregion ngStyle -->
-<div [ngStyle]="{color: colorPreference}">
+<div [ngStyle]="{'color': colorPreference}">
 <!-- #enddocregion ngStyle -->
   color preference #1
 </div>

@@ -19,7 +19,7 @@ export class AppComponent {
   movie: IMovie = null;
   movies: IMovie[] = [];
   showImage = true;
-  title: string = 'AngularJS to Angular Quick Ref Cookbook';
+  title = 'AngularJS to Angular Quick Ref Cookbook';
   toggleImage(event: UIEvent) {
     this.showImage = !this.showImage;
     this.eventType = (event && event.type) || 'not provided';

@@ -19,7 +19,7 @@ import { MovieService } from './movie.service';
 export class MovieListComponent {
   // #enddocregion class
   favoriteHero: string;
-  showImage: boolean = false;
+  showImage = false;
   movies: IMovie[];

   // #docregion di
1  aio/content/examples/animations/.gitignore  vendored
@@ -1 +0,0 @@
-**/*.js
@@ -44,7 +44,7 @@ import { Heroes } from './hero.service';
       animate('0.2s ease-in')
     ]),
     transition('* => void', [
-      animate('0.2s 10 ease-out', style({
+      animate('0.2s 0.1s ease-out', style({
         opacity: 0,
         transform: 'translateX(100%)'
       }))

@@ -1,5 +1,5 @@
 <!DOCTYPE html>
-<html>
+<html lang="en">
 <head>
   <meta charset="UTF-8">
   <title>Animations</title>

@@ -1,8 +1,7 @@
 // #docregion
-import rollup from 'rollup'
-import nodeResolve from 'rollup-plugin-node-resolve'
+import nodeResolve from 'rollup-plugin-node-resolve';
 import commonjs from 'rollup-plugin-commonjs';
-import uglify from 'rollup-plugin-uglify'
+import uglify from 'rollup-plugin-uglify';

 // #docregion config
 export default {

@@ -30,5 +29,5 @@ export default {
     uglify()
     // #enddocregion uglify
   ]
-}
+};
 // #enddocregion config
Some files were not shown because too many files have changed in this diff.