ci(aio): add initial implementation for aio-builds setup

Georgios Kalpakas 2017-02-06 20:40:28 +02:00 committed by Chuck Jazdzewski
parent 794f8f4e6a
commit 115164033b
49 changed files with 5876 additions and 11 deletions

aio/aio-builds-setup/build.sh Executable file
@ -0,0 +1,18 @@
#!/bin/bash
set -eux -o pipefail
# Constants
DOCKERBUILD_DIR="`dirname $0`/dockerbuild"
SCRIPTS_JS_DIR="$DOCKERBUILD_DIR/scripts-js"
DEFAULT_IMAGE_NAME_AND_TAG="aio-builds:latest"
# Build `scripts-js/`
cd "$SCRIPTS_JS_DIR"
yarn install
yarn run build
cd -
# Create docker image
nameAndOptionalTag=$([ $# -eq 0 ] && echo $DEFAULT_IMAGE_NAME_AND_TAG || echo $1)
sudo docker build --tag $nameAndOptionalTag $DOCKERBUILD_DIR

@ -0,0 +1,3 @@
scripts-js/lib
scripts-js/node_modules
scripts-js/**/test

@ -0,0 +1,112 @@
# Image metadata and config
FROM debian:jessie
LABEL name="angular.io PR preview" \
description="This image implements the PR preview functionality for angular.io." \
vendor="Angular" \
version="1.0"
VOLUME /var/www/aio-builds
EXPOSE 80 443
ENV AIO_BUILDS_DIR=/var/www/aio-builds TEST_AIO_BUILDS_DIR=/tmp/aio-builds \
AIO_GITHUB_TOKEN= TEST_AIO_GITHUB_TOKEN= \
AIO_NGINX_HOSTNAME=nginx-prod.localhost TEST_AIO_NGINX_HOSTNAME=nginx-test.localhost \
AIO_NGINX_PORT_HTTP=80 TEST_AIO_NGINX_PORT_HTTP=8080 \
AIO_NGINX_PORT_HTTPS=443 TEST_AIO_NGINX_PORT_HTTPS=4433 \
AIO_REPO_SLUG=angular/angular TEST_AIO_REPO_SLUG= \
AIO_SCRIPTS_JS_DIR=/usr/share/aio-scripts-js \
AIO_SCRIPTS_SH_DIR=/usr/share/aio-scripts-sh \
AIO_UPLOAD_HOSTNAME=upload-prod.localhost TEST_AIO_UPLOAD_HOSTNAME=upload-test.localhost \
AIO_UPLOAD_MAX_SIZE=20971520 TEST_AIO_UPLOAD_MAX_SIZE=20971520 \
AIO_UPLOAD_PORT=3000 TEST_AIO_UPLOAD_PORT=3001 \
NODE_ENV=production
# Create directory for logs
RUN mkdir /var/log/aio
# Add extra package sources
RUN apt-get update -y && apt-get install -y curl
RUN curl --silent --show-error --location https://deb.nodesource.com/setup_6.x | bash -
RUN curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
# Install packages
RUN apt-get update -y && apt-get install -y \
chkconfig \
cron \
dnsmasq \
nano \
nginx \
nodejs \
rsyslog \
yarn
RUN yarn global add pm2
# Set up cronjobs
COPY cronjobs/aio-builds-cleanup /etc/cron.d/
RUN chmod 0744 /etc/cron.d/aio-builds-cleanup
RUN crontab /etc/cron.d/aio-builds-cleanup
# Set up dnsmasq
COPY dnsmasq/dnsmasq.conf /etc/dnsmasq.conf
RUN sed -i "s|{{\$AIO_NGINX_HOSTNAME}}|$AIO_NGINX_HOSTNAME|" /etc/dnsmasq.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|" /etc/dnsmasq.conf
RUN sed -i "s|{{\$TEST_AIO_NGINX_HOSTNAME}}|$TEST_AIO_NGINX_HOSTNAME|" /etc/dnsmasq.conf
RUN sed -i "s|{{\$TEST_AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|" /etc/dnsmasq.conf
# Set up nginx (for production and testing)
RUN rm /etc/nginx/sites-enabled/*
COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$AIO_BUILDS_DIR|" /etc/nginx/sites-available/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$AIO_NGINX_PORT_HTTP|" /etc/nginx/sites-available/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$AIO_UPLOAD_HOSTNAME|" /etc/nginx/sites-available/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$AIO_UPLOAD_MAX_SIZE|" /etc/nginx/sites-available/aio-builds-prod.conf
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$AIO_UPLOAD_PORT|" /etc/nginx/sites-available/aio-builds-prod.conf
RUN ln -s /etc/nginx/sites-available/aio-builds-prod.conf /etc/nginx/sites-enabled/aio-builds-prod.conf
COPY nginx/aio-builds.conf /etc/nginx/sites-available/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_BUILDS_DIR}}|$TEST_AIO_BUILDS_DIR|" /etc/nginx/sites-available/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_NGINX_PORT_HTTP}}|$TEST_AIO_NGINX_PORT_HTTP|" /etc/nginx/sites-available/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_HOSTNAME}}|$TEST_AIO_UPLOAD_HOSTNAME|" /etc/nginx/sites-available/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_MAX_SIZE}}|$TEST_AIO_UPLOAD_MAX_SIZE|" /etc/nginx/sites-available/aio-builds-test.conf
RUN sed -i "s|{{\$AIO_UPLOAD_PORT}}|$TEST_AIO_UPLOAD_PORT|" /etc/nginx/sites-available/aio-builds-test.conf
RUN ln -s /etc/nginx/sites-available/aio-builds-test.conf /etc/nginx/sites-enabled/aio-builds-test.conf
# Set up pm2
RUN pm2 startup systemv -u root > /dev/null \
# Ugly!
|| echo " ---> Working around https://github.com/Unitech/pm2/commit/a788e523e#commitcomment-20851443" \
&& chkconfig --add pm2 > /dev/null
RUN chkconfig pm2 on
# Set up the shell scripts
COPY scripts-sh/ $AIO_SCRIPTS_SH_DIR/
RUN chmod a+x $AIO_SCRIPTS_SH_DIR/*
RUN find $AIO_SCRIPTS_SH_DIR -maxdepth 1 -type f -printf "%P\n" \
| while read file; do ln -s $AIO_SCRIPTS_SH_DIR/$file /usr/local/bin/aio-${file%.*}; done
# Set up the Node.js scripts
COPY scripts-js/ $AIO_SCRIPTS_JS_DIR/
WORKDIR $AIO_SCRIPTS_JS_DIR/
RUN yarn install --production
# Set up health check
HEALTHCHECK --interval=5m CMD /usr/local/bin/aio-health-check
# Go!
WORKDIR /
CMD aio-init && tail -f /dev/null

@ -0,0 +1,2 @@
# Periodically clean up builds that do not correspond to currently open PRs
0 4 * * * root /usr/local/bin/aio-clean-up >> /var/log/cron.log 2>&1

@ -0,0 +1,16 @@
# Do not read /etc/resolv.conf. Get servers from this file instead.
no-resolv
server=8.8.8.8
server=8.8.4.4
# Listen for DHCP and DNS requests only on this address.
listen-address=127.0.0.1
# Force an IP address for these domains.
address=/{{$AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$AIO_UPLOAD_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_NGINX_HOSTNAME}}/127.0.0.1
address=/{{$TEST_AIO_UPLOAD_HOSTNAME}}/127.0.0.1
# Run as root (required from inside docker container).
user=root

@ -0,0 +1,56 @@
# Serve PR-preview requests
server {
listen {{$AIO_NGINX_PORT_HTTP}};
listen [::]:{{$AIO_NGINX_PORT_HTTP}};
server_name "~^pr(?<pr>[1-9][0-9]*)-(?<sha>[0-9a-f]{40})\.";
root {{$AIO_BUILDS_DIR}}/$pr/$sha;
disable_symlinks on from=$document_root;
index index.html;
location / {
try_files $uri $uri/ =404;
}
}
# Handle all other requests
server {
listen {{$AIO_NGINX_PORT_HTTP}} default_server;
listen [::]:{{$AIO_NGINX_PORT_HTTP}};
server_name _;
# Health check
location "~^\/health-check\/?$" {
add_header Content-Type text/plain;
return 200 '';
}
# Upload builds
location "~^\/create-build\/(?<pr>[1-9][0-9]*)\/(?<sha>[0-9a-f]{40})\/?$" {
if ($request_method != "POST") {
add_header Allow "POST";
return 405;
}
client_body_temp_path /tmp/aio-create-builds;
client_body_buffer_size 128K;
client_max_body_size {{$AIO_UPLOAD_MAX_SIZE}};
client_body_in_file_only on;
proxy_pass_request_headers on;
proxy_set_header X-FILE $request_body_file;
proxy_set_body off;
proxy_redirect off;
proxy_method GET;
proxy_pass http://{{$AIO_UPLOAD_HOSTNAME}}:{{$AIO_UPLOAD_PORT}}$request_uri;
resolver 127.0.0.1;
}
# Everything else
location / {
return 404;
}
}
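Note how the `create-build` location above implements the upload hand-off: nginx buffers the POSTed body to a temporary file (`client_body_in_file_only on`), passes only that file's path to the upload server via the `X-FILE` header, and re-issues the request as a GET (`proxy_method GET`). The following is a minimal, hypothetical client sketch (not part of this commit) illustrating the flow; the hostname and port are the production defaults from the Dockerfile, and the e2e tests later in this commit do the same thing with `curl --data-binary`.

// Illustrative upload client (assumption: run from a host that resolves the nginx hostname).
import {readFileSync} from 'fs';
import {request} from 'http';

function uploadPreview(pr: number, sha: string, archivePath: string): Promise<number> {
  return new Promise((resolve, reject) => {
    const req = request({
      host: 'nginx-prod.localhost',            // AIO_NGINX_HOSTNAME
      port: 80,                                // AIO_NGINX_PORT_HTTP
      method: 'POST',
      path: `/create-build/${pr}/${sha}`,
    }, res => resolve(res.statusCode || -1));  // 201 on success, 403 if the build already exists
    req.on('error', reject);
    req.end(readFileSync(archivePath));        // Send the .tar.gz archive as the raw request body.
  });
}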

@ -0,0 +1 @@
/dist/

@ -0,0 +1,72 @@
// Imports
import * as fs from 'fs';
import * as path from 'path';
import * as shell from 'shelljs';
import {GithubPullRequests} from '../common/github-pull-requests';
// Classes
export class BuildCleaner {
// Constructor
constructor(protected buildsDir: string, protected repoSlug: string, protected githubToken?: string) {
if (!buildsDir) {
throw new Error('Missing required parameter \'buildsDir\'!');
} else if (!repoSlug) {
throw new Error('Missing required parameter \'repoSlug\'!');
}
}
// Methods - Public
public cleanUp(): Promise<void> {
return Promise.all([
this.getExistingBuildNumbers(),
this.getOpenPrNumbers(),
]).then(([existingBuilds, openPrs]) => this.removeUnnecessaryBuilds(existingBuilds, openPrs));
}
// Methods - Protected
protected getExistingBuildNumbers(): Promise<number[]> {
return new Promise((resolve, reject) => {
fs.readdir(this.buildsDir, (err, files) => {
if (err) {
return reject(err);
}
const buildNumbers = files.
map(Number). // Convert string to number
filter(Boolean); // Ignore NaN (or 0), because they are not builds
resolve(buildNumbers);
});
});
}
protected getOpenPrNumbers(): Promise<number[]> {
const githubPullRequests = new GithubPullRequests(this.repoSlug, this.githubToken);
return githubPullRequests.
fetchAll('open').
then(prs => prs.map(pr => pr.number));
}
protected removeDir(dir: string) {
try {
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
(shell as any).chmod('-R', 'a+w', dir);
shell.rm('-rf', dir);
} catch (err) {
console.error(`ERROR: Unable to remove '${dir}' due to:`, err);
}
}
protected removeUnnecessaryBuilds(existingBuildNumbers: number[], openPrNumbers: number[]) {
const toRemove = existingBuildNumbers.filter(num => !openPrNumbers.includes(num));
console.log(`Existing builds: ${existingBuildNumbers.length}`);
console.log(`Open pull requests: ${openPrNumbers.length}`);
console.log(`Removing ${toRemove.length} build(s): ${toRemove.join(', ')}`);
toRemove.
map(num => path.join(this.buildsDir, String(num))).
forEach(dir => this.removeDir(dir));
}
}

@ -0,0 +1,21 @@
// Imports
import {getEnvVar} from '../common/utils';
import {BuildCleaner} from './build-cleaner';
// Constants
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN', true);
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG');
// Run
_main();
// Functions
function _main() {
const buildCleaner = new BuildCleaner(AIO_BUILDS_DIR, AIO_REPO_SLUG, AIO_GITHUB_TOKEN);
buildCleaner.cleanUp().catch(err => {
console.error('ERROR:', err);
process.exit(1);
});
}

@ -0,0 +1,97 @@
// Imports
import {IncomingMessage} from 'http';
import * as https from 'https';
// Constants
const GITHUB_HOSTNAME = 'api.github.com';
// Interfaces - Types
interface RequestParams {
[key: string]: string | number;
}
type RequestParamsOrNull = RequestParams | null;
// Classes
export class GithubApi {
protected requestHeaders: {[key: string]: string};
// Constructor
constructor(protected repoSlug: string, githubToken?: string) {
if (!repoSlug) {
throw new Error('Missing required parameter \'repoSlug\'!');
}
if (!githubToken) {
console.warn('No GitHub access-token specified. Requests will be unauthenticated.');
}
this.requestHeaders = {'User-Agent': `Node/${process.versions.node}`};
if (githubToken) {
this.requestHeaders['Authorization'] = `token ${githubToken}`;
}
}
// Methods - Public
public get<T>(pathname: string, params?: RequestParamsOrNull): Promise<T> {
const path = this.buildPath(pathname, params);
return this.request<T>('get', path);
}
public post<T>(pathname: string, params?: RequestParamsOrNull, data?: any): Promise<T> {
const path = this.buildPath(pathname, params);
return this.request<T>('post', path, data);
}
// Methods - Protected
protected buildPath(pathname: string, params?: RequestParamsOrNull): string {
if (params == null) {
return pathname;
}
const search = (params === null) ? '' : this.serializeSearchParams(params);
const joiner = search && '?';
return `${pathname}${joiner}${search}`;
}
protected request<T>(method: string, path: string, data: any = null): Promise<T> {
return new Promise<T>((resolve, reject) => {
const options = {
headers: {...this.requestHeaders},
host: GITHUB_HOSTNAME,
method,
path,
};
const onError = (statusCode: number, responseText: string) => {
const url = `https://${GITHUB_HOSTNAME}${path}`;
reject(`Request to '${url}' failed (status: ${statusCode}): ${responseText}`);
};
const onSuccess = (responseText: string) => {
try { resolve(JSON.parse(responseText)); } catch (err) { reject(err); }
};
const onResponse = (res: IncomingMessage) => {
const statusCode = res.statusCode || -1;
const isSuccess = (200 <= statusCode) && (statusCode < 400);
let responseText = '';
res.
on('data', d => responseText += d).
on('end', () => isSuccess ? onSuccess(responseText) : onError(statusCode, responseText)).
on('error', reject);
};
https.
request(options, onResponse).
on('error', reject).
end(data && JSON.stringify(data));
});
}
protected serializeSearchParams(params: RequestParams): string {
return Object.keys(params).
filter(key => params[key] != null).
map(key => `${key}=${encodeURIComponent(String(params[key]))}`).
join('&');
}
}

@ -0,0 +1,51 @@
// Imports
import {GithubApi} from './github-api';
// Interfaces - Types
interface PullRequest {
number: number;
}
export type PullRequestState = 'all' | 'closed' | 'open';
// Classes
export class GithubPullRequests extends GithubApi {
// Methods - Public
public addComment(pr: number, body: string): Promise<void> {
if (!(pr > 0)) {
throw new Error(`Invalid PR number: ${pr}`);
} else if (!body) {
throw new Error(`Invalid or empty comment body: ${body}`);
}
return this.post<void>(`/repos/${this.repoSlug}/issues/${pr}/comments`, null, {body});
}
public fetchAll(state: PullRequestState = 'all'): Promise<PullRequest[]> {
process.stdout.write(`Fetching ${state} pull requests...`);
return this.fetchUntilDone(state, 0);
}
// Methods - Protected
protected fetchUntilDone(state: PullRequestState, currentPage: number): Promise<PullRequest[]> {
process.stdout.write('.');
const perPage = 100;
const pathname = `/repos/${this.repoSlug}/pulls`;
const params = {
page: currentPage,
per_page: perPage,
state,
};
return this.get<PullRequest[]>(pathname, params).then(pullRequests => {
if (pullRequests.length < perPage) {
console.log('done');
return pullRequests;
}
return this.fetchUntilDone(state, currentPage + 1).
then(morePullRequests => [...pullRequests, ...morePullRequests]);
});
}
}
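A hypothetical usage sketch (not part of this commit) of the two public methods above; the PR number is illustrative and the token is read from the environment:

import {GithubPullRequests} from './github-pull-requests';

const prs = new GithubPullRequests('angular/angular', process.env.AIO_GITHUB_TOKEN);
prs.fetchAll('open').
  then(openPrs => console.log(`Open PRs: ${openPrs.map(pr => pr.number).join(', ')}`)).
  then(() => prs.addComment(12345, 'The preview for this PR is available.')).
  catch(err => console.error('ERROR:', err));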

@ -0,0 +1,23 @@
export const runTests = (specFiles: string[], helpers?: string[]) => {
// We can't use `import` here, because of the following mess:
// - GitHub project `jasmine/jasmine` is `jasmine-core` on npm and its typings `@types/jasmine`.
// - GitHub project `jasmine/jasmine-npm` is `jasmine` on npm and has no typings.
//
// Using `import...from 'jasmine'` here would import from `@types/jasmine` (which refers to the
// `jasmine-core` module), not the actual `jasmine` module.
// tslint:disable-next-line: no-var-requires variable-name
const Jasmine = require('jasmine');
const config = {
helpers,
random: true,
spec_files: specFiles,
stopSpecOnExpectationFailure: true,
};
process.on('unhandledRejection', (reason: any) => console.log('Unhandled rejection:', reason));
const runner = new Jasmine();
runner.loadConfig(config);
runner.onComplete((passed: boolean) => process.exit(passed ? 0 : 1));
runner.execute();
};

@ -0,0 +1,11 @@
// Functions
export const getEnvVar = (name: string, isOptional = false): string => {
const value = process.env[name];
if (!isOptional && !value) {
console.error(`ERROR: Missing required environment variable '${name}'!`);
process.exit(1);
}
return value || '';
};
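For illustration, the index entry points in this commit use both modes: a required variable aborts the process when missing, while an optional one falls back to an empty string.

const buildsDir = getEnvVar('AIO_BUILDS_DIR');             // exits with code 1 if unset
const githubToken = getEnvVar('AIO_GITHUB_TOKEN', true);   // '' if unset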

@ -0,0 +1,83 @@
// Imports
import * as cp from 'child_process';
import {EventEmitter} from 'events';
import * as fs from 'fs';
import * as path from 'path';
import * as shell from 'shelljs';
import {CreatedBuildEvent} from './build-events';
import {UploadError} from './upload-error';
// Classes
export class BuildCreator extends EventEmitter {
// Constructor
constructor(protected buildsDir: string) {
super();
if (!buildsDir) {
throw new Error('Missing or empty required parameter \'buildsDir\'!');
}
}
// Methods - Public
public create(pr: string, sha: string, archivePath: string): Promise<any> {
const prDir = path.join(this.buildsDir, pr);
const shaDir = path.join(prDir, sha);
let dirToRemoveOnError: string;
return Promise.
all([this.exists(prDir), this.exists(shaDir)]).
then(([prDirExisted, shaDirExisted]) => {
if (shaDirExisted) {
throw new UploadError(403, `Request to overwrite existing directory: ${shaDir}`);
}
dirToRemoveOnError = prDirExisted ? shaDir : prDir;
return Promise.resolve().
then(() => shell.mkdir('-p', shaDir)).
then(() => this.extractArchive(archivePath, shaDir)).
then(() => this.emit(CreatedBuildEvent.type, new CreatedBuildEvent(+pr, sha)));
}).
catch(err => {
if (dirToRemoveOnError) {
shell.rm('-rf', dirToRemoveOnError);
}
if (!(err instanceof UploadError)) {
err = new UploadError(500, `Error while uploading to directory: ${shaDir}\n${err}`);
}
throw err;
});
}
// Methods - Protected
protected exists(fileOrDir: string): Promise<boolean> {
return new Promise(resolve => fs.access(fileOrDir, err => resolve(!err)));
}
protected extractArchive(inputFile: string, outputDir: string): Promise<void> {
return new Promise<void>((resolve, reject) => {
const cmd = `tar --extract --gzip --directory "${outputDir}" --file "${inputFile}"`;
cp.exec(cmd, (err, _stdout, stderr) => {
if (err) {
return reject(err);
}
if (stderr) {
console.warn(stderr);
}
try {
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
(shell as any).chmod('-R', 'a-w', outputDir);
shell.rm('-f', inputFile);
resolve();
} catch (err) {
reject(err);
}
});
});
}
}

@ -0,0 +1,15 @@
// Classes
export class BuildEvent {
// Constructor
constructor(public type: string, public pr: number, public sha: string) {}
}
export class CreatedBuildEvent extends BuildEvent {
// Properties - Public, Static
public static type = 'build.created';
// Constructor
constructor(pr: number, sha: string) {
super(CreatedBuildEvent.type, pr, sha);
}
}

@ -0,0 +1,42 @@
// TODO(gkalpak): Find more suitable way to run as `www-data`.
process.setuid('www-data');
// Imports
import {GithubPullRequests} from '../common/github-pull-requests';
import {getEnvVar} from '../common/utils';
import {CreatedBuildEvent} from './build-events';
import {uploadServerFactory} from './upload-server-factory';
// Constants
const AIO_BUILDS_DIR = getEnvVar('AIO_BUILDS_DIR');
const AIO_GITHUB_TOKEN = getEnvVar('AIO_GITHUB_TOKEN', true);
const AIO_REPO_SLUG = getEnvVar('AIO_REPO_SLUG', true);
const AIO_UPLOAD_HOSTNAME = getEnvVar('AIO_UPLOAD_HOSTNAME');
const AIO_UPLOAD_PORT = +getEnvVar('AIO_UPLOAD_PORT');
// Run
_main();
// Functions
function _main() {
uploadServerFactory.
create(AIO_BUILDS_DIR).
on(CreatedBuildEvent.type, createOnBuildCreatedHandler()).
listen(AIO_UPLOAD_PORT, AIO_UPLOAD_HOSTNAME);
}
function createOnBuildCreatedHandler() {
if (!AIO_REPO_SLUG) {
console.warn('No repo specified. Preview links will not be posted on PRs.');
return () => null;
}
const githubPullRequests = new GithubPullRequests(AIO_REPO_SLUG, AIO_GITHUB_TOKEN);
return ({pr, sha}: CreatedBuildEvent) => {
const body = `The angular.io preview for ${sha.slice(0, 7)} is available [here][1].\n\n` +
`[1]: https://pr${pr}-${sha}.ngbuilds.io/`;
githubPullRequests.addComment(pr, body);
};
}

@ -0,0 +1,8 @@
// Classes
export class UploadError extends Error {
// Constructor
constructor(public status: number = 500, message?: string) {
super(message);
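// Restore the prototype chain, so that `instanceof UploadError` keeps working when a built-in
// like `Error` is extended and the code is compiled down to ES5.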
Object.setPrototypeOf(this, UploadError.prototype);
}
}

@ -0,0 +1,76 @@
// Imports
import * as express from 'express';
import * as http from 'http';
import {BuildCreator} from './build-creator';
import {CreatedBuildEvent} from './build-events';
import {UploadError} from './upload-error';
// Constants
const X_FILE_HEADER = 'X-FILE';
// Classes
class UploadServerFactory {
// Methods - Public
public create(buildsDir: string): http.Server {
if (!buildsDir) {
throw new Error('Missing or empty required parameter \'buildsDir\'!');
}
const buildCreator = new BuildCreator(buildsDir);
const middleware = this.createMiddleware(buildCreator);
const httpServer = http.createServer(middleware);
buildCreator.on(CreatedBuildEvent.type, (data: CreatedBuildEvent) => httpServer.emit(CreatedBuildEvent.type, data));
httpServer.on('listening', () => {
const info = httpServer.address();
console.info(`Up and running (and listening on ${info.address}:${info.port})...`);
});
return httpServer;
}
// Methods - Protected
protected createMiddleware(buildCreator: BuildCreator): express.Express {
const middleware = express();
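// Note: nginx receives the client's POST and re-issues it as a GET with an `X-FILE` header
// (see `proxy_method GET` in the nginx config), hence the route is registered with `get()`.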
middleware.get(/^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})\/?$/, (req, res) => {
const pr = req.params[0];
const sha = req.params[1];
const archive = req.header(X_FILE_HEADER);
if (!archive) {
this.throwRequestError(400, `Missing or empty '${X_FILE_HEADER}' header`, req);
}
buildCreator.
create(pr, sha, archive).
then(() => res.sendStatus(201)).
catch(err => this.respondWithError(res, err));
});
middleware.get(/^\/health-check\/?$/, (_req, res) => res.sendStatus(200));
middleware.get('*', req => this.throwRequestError(404, 'Unknown resource', req));
middleware.all('*', req => this.throwRequestError(405, 'Unsupported method', req));
middleware.use((err: any, _req: any, res: express.Response, _next: any) => this.respondWithError(res, err));
return middleware;
}
protected respondWithError(res: express.Response, err: any) {
if (!(err instanceof UploadError)) {
err = new UploadError(500, String((err && err.message) || err));
}
const statusText = http.STATUS_CODES[err.status] || '???';
console.error(`Upload error: ${err.status} - ${statusText}`);
console.error(err.message);
res.status(err.status).end(err.message);
}
protected throwRequestError(status: number, error: string, req: express.Request) {
throw new UploadError(status, `${error} in request: ${req.method} ${req.originalUrl}`);
}
}
// Exports
export const uploadServerFactory = new UploadServerFactory();
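A hypothetical local smoke test (not part of this commit): create the server against a scratch directory and hit the health-check endpoint. The port and directory are illustrative; in the Docker image the server is configured through the `AIO_*` environment variables instead.

import * as http from 'http';
import {uploadServerFactory} from './upload-server-factory';

const server = uploadServerFactory.create('/tmp/aio-builds');
server.listen(3001, 'localhost', () => {
  http.get('http://localhost:3001/health-check', res => {
    console.log(`Health check responded with ${res.statusCode}`);  // Expected: 200
    server.close();
  });
});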

@ -0,0 +1,197 @@
// Imports
import * as cp from 'child_process';
import * as fs from 'fs';
import * as http from 'http';
import * as path from 'path';
import * as shell from 'shelljs';
import {getEnvVar} from '../common/utils';
// Constants
const SERVER_USER = 'www-data';
const TEST_AIO_BUILDS_DIR = getEnvVar('TEST_AIO_BUILDS_DIR');
const TEST_AIO_NGINX_HOSTNAME = getEnvVar('TEST_AIO_NGINX_HOSTNAME');
const TEST_AIO_NGINX_PORT_HTTP = +getEnvVar('TEST_AIO_NGINX_PORT_HTTP');
const TEST_AIO_NGINX_PORT_HTTPS = +getEnvVar('TEST_AIO_NGINX_PORT_HTTPS');
const TEST_AIO_UPLOAD_HOSTNAME = getEnvVar('TEST_AIO_UPLOAD_HOSTNAME');
const TEST_AIO_UPLOAD_MAX_SIZE = +getEnvVar('TEST_AIO_UPLOAD_MAX_SIZE');
const TEST_AIO_UPLOAD_PORT = +getEnvVar('TEST_AIO_UPLOAD_PORT');
// Interfaces - Types
export interface CleanUpFn extends Function {}
export interface CmdResult { success: boolean; err: Error; stdout: string; stderr: string; }
export interface FileSpecs { content?: string; size?: number; }
export interface TestSuiteFactory { (scheme: string, port: number): void }
export interface VerifyCmdResultFn { (result: CmdResult): void; }
// Classes
class Helper {
// Properties - Public
public get buildsDir() { return TEST_AIO_BUILDS_DIR; }
public get nginxHostname() { return TEST_AIO_NGINX_HOSTNAME; }
public get nginxPortHttp() { return TEST_AIO_NGINX_PORT_HTTP; }
public get nginxPortHttps() { return TEST_AIO_NGINX_PORT_HTTPS; }
public get serverUser() { return SERVER_USER; }
public get uploadHostname() { return TEST_AIO_UPLOAD_HOSTNAME; }
public get uploadPort() { return TEST_AIO_UPLOAD_PORT; }
public get uploadMaxSize() { return TEST_AIO_UPLOAD_MAX_SIZE; }
// Properties - Protected
protected cleanUpFns: CleanUpFn[] = [];
protected portPerScheme: {[scheme: string]: number} = {
http: this.nginxPortHttp,
https: this.nginxPortHttps,
};
// Constructor
constructor() {
shell.mkdir('-p', this.buildsDir);
shell.exec(`chown -R ${this.serverUser} ${this.buildsDir}`);
}
// Methods - Public
public cleanUp() {
while (this.cleanUpFns.length) {
// Clean-up fns remove themselves from the list.
this.cleanUpFns[0]();
}
if (fs.readdirSync(this.buildsDir).length) {
throw new Error(`Directory '${this.buildsDir}' is not empty after clean-up.`);
}
}
public createDummyArchive(pr: string, sha: string, archivePath: string): CleanUpFn {
const inputDir = path.join(this.buildsDir, 'uploaded', pr, sha);
const cmd1 = `tar --create --gzip --directory "${inputDir}" --file "${archivePath}" .`;
const cmd2 = `chown ${this.serverUser} ${archivePath}`;
const cleanUpTemp = this.createDummyBuild(`uploaded/${pr}`, sha, true);
shell.exec(cmd1);
shell.exec(cmd2);
cleanUpTemp();
return this.createCleanUpFn(() => shell.rm('-rf', archivePath));
}
public createDummyBuild(pr: string, sha: string, force = false): CleanUpFn {
const prDir = path.join(this.buildsDir, pr);
const shaDir = path.join(prDir, sha);
const idxPath = path.join(shaDir, 'index.html');
const barPath = path.join(shaDir, 'foo', 'bar.js');
this.writeFile(idxPath, {content: `PR: ${pr} | SHA: ${sha} | File: /index.html`}, force);
this.writeFile(barPath, {content: `PR: ${pr} | SHA: ${sha} | File: /foo/bar.js`}, force);
shell.exec(`chown -R ${this.serverUser} ${prDir}`);
return this.createCleanUpFn(() => shell.rm('-rf', prDir));
}
public deletePrDir(pr: string) {
const prDir = path.join(this.buildsDir, pr);
if (fs.existsSync(prDir)) {
// Undocumented signature (see https://github.com/shelljs/shelljs/pull/663).
(shell as any).chmod('-R', 'a+w', prDir);
shell.rm('-rf', prDir);
}
}
public readBuildFile(pr: string, sha: string, relFilePath: string): string {
const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
return fs.readFileSync(absFilePath, 'utf8');
}
public runCmd(cmd: string, opts: cp.ExecFileOptions = {}): Promise<CmdResult> {
return new Promise(resolve => {
const proc = cp.exec(cmd, opts, (err, stdout, stderr) => resolve({success: !err, err, stdout, stderr}));
this.createCleanUpFn(() => proc.kill());
});
}
public runForAllSupportedSchemes(suiteFactory: TestSuiteFactory) {
Object.keys(this.portPerScheme).forEach(scheme => {
// TODO (gkalpak): Enable HTTPS tests
if (scheme === 'https') {
return it('should have tests');
}
suiteFactory(scheme, this.portPerScheme[scheme]);
});
}
public verifyResponse(status: number | (number | string)[], regex = /^/): VerifyCmdResultFn {
let statusCode: number;
let statusText: string;
if (Array.isArray(status)) {
statusCode = status[0] as number;
statusText = status[1] as string;
} else {
statusCode = status;
statusText = http.STATUS_CODES[statusCode];
}
return (result: CmdResult) => {
const [headers, body] = result.stdout.
split(/(?:\r?\n){2,}/).
map(s => s.trim()).
slice(-2);
if (!result.success) {
console.log('Stdout:', result.stdout);
console.log('Stderr:', result.stderr);
console.log('Error:', result.err);
}
expect(result.success).toBe(true);
expect(headers).toContain(`${statusCode} ${statusText}`);
expect(body).toMatch(regex);
};
}
public writeBuildFile(pr: string, sha: string, relFilePath: string, content: string): CleanUpFn {
const absFilePath = path.join(this.buildsDir, pr, sha, relFilePath);
return this.writeFile(absFilePath, {content}, true);
}
public writeFile(filePath: string, {content, size}: FileSpecs, force = false): CleanUpFn {
if (!force && fs.existsSync(filePath)) {
throw new Error(`Refusing to overwrite existing file '${filePath}'.`);
}
let cleanUpTarget = filePath;
while (!fs.existsSync(path.dirname(cleanUpTarget))) {
cleanUpTarget = path.dirname(cleanUpTarget);
}
shell.mkdir('-p', path.dirname(filePath));
if (size) {
// Create a file of the specified size.
cp.execSync(`fallocate -l ${size} ${filePath}`);
} else {
// Create a file with the specified content.
fs.writeFileSync(filePath, content || '');
}
shell.exec(`chown ${this.serverUser} ${filePath}`);
return this.createCleanUpFn(() => shell.rm('-rf', cleanUpTarget));
}
// Methods - Protected
protected createCleanUpFn(fn: Function): CleanUpFn {
const cleanUpFn = () => {
const idx = this.cleanUpFns.indexOf(cleanUpFn);
if (idx !== -1) {
this.cleanUpFns.splice(idx, 1);
fn();
}
};
this.cleanUpFns.push(cleanUpFn);
return cleanUpFn;
}
}
// Exports
export const helper = new Helper();

@ -0,0 +1,6 @@
// Imports
import {runTests} from '../common/run-tests';
// Run
const specFiles = [`${__dirname}/**/*.e2e.js`];
runTests(specFiles);

@ -0,0 +1,232 @@
// Imports
import * as path from 'path';
import {helper as h} from './helper';
// Tests
h.runForAllSupportedSchemes((scheme, port) => describe(`nginx (on ${scheme.toUpperCase()})`, () => {
const hostname = h.nginxHostname;
const host = `${hostname}:${port}`;
const pr = '9';
const sha9 = '9'.repeat(40);
const sha0 = '0'.repeat(40);
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
afterEach(() => h.cleanUp());
describe(`pr<pr>-<sha>.${host}/*`, () => {
beforeEach(() => {
h.createDummyBuild(pr, sha9);
h.createDummyBuild(pr, sha0);
});
it('should return /index.html', done => {
const origin = `${scheme}://pr${pr}-${sha9}.${host}`;
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /index\\.html$`);
Promise.all([
h.runCmd(`curl -iL ${origin}/index.html`).then(h.verifyResponse(200, bodyRegex)),
h.runCmd(`curl -iL ${origin}/`).then(h.verifyResponse(200, bodyRegex)),
h.runCmd(`curl -iL ${origin}`).then(h.verifyResponse(200, bodyRegex)),
]).then(done);
});
it('should return /foo/bar.js', done => {
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha9} \\| File: /foo/bar\\.js$`);
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/bar.js`).
then(h.verifyResponse(200, bodyRegex)).
then(done);
});
it('should respond with 403 for directories', done => {
Promise.all([
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/`).then(h.verifyResponse(403)),
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo`).then(h.verifyResponse(403)),
]).then(done);
});
it('should respond with 404 for unknown paths', done => {
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}.${host}/foo/baz.css`).
then(h.verifyResponse(404)).
then(done);
});
it('should respond with 404 for unknown PRs/SHAs', done => {
const otherPr = 54321;
const otherSha = '8'.repeat(40);
Promise.all([
h.runCmd(`curl -iL ${scheme}://pr${pr}9-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${otherPr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha9}9.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${pr}-${otherSha}.${host}`).then(h.verifyResponse(404)),
]).then(done);
});
it('should respond with 404 if the subdomain format is wrong', done => {
Promise.all([
h.runCmd(`curl -iL ${scheme}://xpr${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://prx${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://xx${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://p${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://r${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${pr}-${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${pr}${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${pr}_${sha9}.${host}`).then(h.verifyResponse(404)),
]).then(done);
});
it('should reject PRs with leading zeros', done => {
h.runCmd(`curl -iL ${scheme}://pr0${pr}-${sha9}.${host}`).
then(h.verifyResponse(404)).
then(done);
});
it('should accept SHAs with leading zeros (but not ignore them)', done => {
const bodyRegex = new RegExp(`^PR: ${pr} \\| SHA: ${sha0} \\| File: /index\\.html$`);
Promise.all([
h.runCmd(`curl -iL ${scheme}://pr${pr}-0${sha9}.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha0}.${host}`).then(h.verifyResponse(200, bodyRegex)),
]).then(done);
});
});
describe(`${host}/health-check`, () => {
it('should respond with 200', done => {
Promise.all([
h.runCmd(`curl -iL ${scheme}://${host}/health-check`).then(h.verifyResponse(200)),
h.runCmd(`curl -iL ${scheme}://${host}/health-check/`).then(h.verifyResponse(200)),
]).then(done);
});
it('should respond with 404 if the path does not match exactly', done => {
Promise.all([
h.runCmd(`curl -iL ${scheme}://${host}/health-check/foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/health-check-foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/health-checknfoo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/foo/health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/foo-health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/foonhealth-check`).then(h.verifyResponse(404)),
]).then(done);
});
});
describe(`${host}/create-build/<pr>/<sha>`, () => {
it('should disallow non-POST requests', done => {
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
Promise.all([
h.runCmd(`curl -iLX GET ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse([405, 'Not Allowed'])),
]).then(done);
});
it(`should reject files larger than ${h.uploadMaxSize}B (according to header)`, done => {
const headers = `--header "Content-Length: ${1.5 * h.uploadMaxSize}"`;
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
h.runCmd(`curl -iLX POST ${headers} ${url}`).
then(h.verifyResponse([413, 'Request Entity Too Large'])).
then(done);
});
it(`should reject files larger than ${h.uploadMaxSize}B (without header)`, done => {
const filePath = path.join(h.buildsDir, 'snapshot.tar.gz');
const url = `${scheme}://${host}/create-build/${pr}/${sha9}`;
h.writeFile(filePath, {size: 1.5 * h.uploadMaxSize});
h.runCmd(`curl -iLX POST --data-binary "@${filePath}" ${url}`).
then(h.verifyResponse([413, 'Request Entity Too Large'])).
then(done);
});
it('should pass requests through to the upload server', done => {
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(400, /Missing or empty 'X-FILE' header/)).
then(done);
});
it('should respond with 404 for unknown paths', done => {
const cmdPrefix = `curl -iLX POST ${scheme}://${host}`;
Promise.all([
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
]).then(done);
});
it('should reject PRs with leading zeros', done => {
h.runCmd(`curl -iLX POST ${scheme}://${host}/create-build/0${pr}/${sha9}`).
then(h.verifyResponse(404)).
then(done);
});
it('should accept SHAs with leading zeros (but not ignore them)', done => {
const cmdPrefix = `curl -iLX POST ${scheme}://${host}/create-build/${pr}`;
const bodyRegex = /Missing or empty 'X-FILE' header/;
Promise.all([
h.runCmd(`${cmdPrefix}/0${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/${sha0}`).then(h.verifyResponse(400, bodyRegex)),
]).then(done);
});
});
describe(`${host}/*`, () => {
it('should respond with 404 for unknown URLs (even if the resource exists)', done => {
['index.html', 'foo.js', 'foo/index.html'].forEach(relFilePath => {
const absFilePath = path.join(h.buildsDir, relFilePath);
h.writeFile(absFilePath, {content: `File: /${relFilePath}`});
});
Promise.all([
h.runCmd(`curl -iL ${scheme}://${host}/index.html`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://foo.${host}/index.html`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://foo.${host}/`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://foo.${host}`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/foo.js`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL ${scheme}://${host}/foo/index.html`).then(h.verifyResponse(404)),
]).then(done);
});
});
}));

@ -0,0 +1,82 @@
// Imports
import * as path from 'path';
import {helper as h} from './helper';
// Tests
h.runForAllSupportedSchemes((scheme, port) => describe(`integration (on ${scheme.toUpperCase()})`, () => {
const hostname = h.nginxHostname;
const host = `${hostname}:${port}`;
const pr9 = '9';
const sha9 = '9'.repeat(40);
const sha0 = '0'.repeat(40);
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
const getFile = (pr: string, sha: string, file: string) =>
h.runCmd(`curl -iL ${scheme}://pr${pr}-${sha}.${host}/${file}`);
const uploadBuild = (pr: string, sha: string, archive: string) =>
h.runCmd(`curl -iLX POST --data-binary "@${archive}" ${scheme}://${host}/create-build/${pr}/${sha}`);
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
afterEach(() => {
h.deletePrDir(pr9);
h.cleanUp();
});
it('should be able to upload and serve a build for a new PR', done => {
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
h.createDummyArchive(pr9, sha9, archivePath);
uploadBuild(pr9, sha9, archivePath).
then(() => Promise.all([
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
])).
then(done);
});
it('should be able to upload and serve a build for an existing PR', done => {
const regexPrefix0 = `^PR: ${pr9} \\| SHA: ${sha0} \\| File:`;
const idxContentRegex0 = new RegExp(`${regexPrefix0} \\/index\\.html$`);
const barContentRegex0 = new RegExp(`${regexPrefix0} \\/foo\\/bar\\.js$`);
const regexPrefix9 = `^PR: uploaded\\/${pr9} \\| SHA: ${sha9} \\| File:`;
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
h.createDummyBuild(pr9, sha0);
h.createDummyArchive(pr9, sha9, archivePath);
uploadBuild(pr9, sha9, archivePath).
then(() => Promise.all([
getFile(pr9, sha0, 'index.html').then(h.verifyResponse(200, idxContentRegex0)),
getFile(pr9, sha0, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex0)),
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
])).
then(done);
});
it('should not be able to overwrite a build', done => {
const regexPrefix9 = `^PR: ${pr9} \\| SHA: ${sha9} \\| File:`;
const idxContentRegex9 = new RegExp(`${regexPrefix9} \\/index\\.html$`);
const barContentRegex9 = new RegExp(`${regexPrefix9} \\/foo\\/bar\\.js$`);
h.createDummyBuild(pr9, sha9);
h.createDummyArchive(pr9, sha9, archivePath);
uploadBuild(pr9, sha9, archivePath).
then(h.verifyResponse(403)).
then(() => Promise.all([
getFile(pr9, sha9, 'index.html').then(h.verifyResponse(200, idxContentRegex9)),
getFile(pr9, sha9, 'foo/bar.js').then(h.verifyResponse(200, barContentRegex9)),
])).
then(done);
});
}));

@ -0,0 +1,249 @@
// Imports
import * as fs from 'fs';
import * as path from 'path';
import {CmdResult, helper as h} from './helper';
// Tests
describe('upload-server (on HTTP)', () => {
const hostname = h.uploadHostname;
const port = h.uploadPort;
const host = `${hostname}:${port}`;
const pr = '9';
const sha9 = '9'.repeat(40);
const sha0 = '0'.repeat(40);
beforeEach(() => jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000);
afterEach(() => h.cleanUp());
describe(`${host}/create-build/<pr>/<sha>`, () => {
const curl = `curl -iL --header "X-FILE: ${h.buildsDir}/snapshot.tar.gz"`;
it('should disallow non-GET requests', done => {
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = /^Unsupported method/;
Promise.all([
h.runCmd(`curl -iLX PUT ${url}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX POST ${url}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX PATCH ${url}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX DELETE ${url}`).then(h.verifyResponse(405, bodyRegex)),
]).then(done);
});
it('should reject requests without an \'X-FILE\' header', done => {
const headers = '--header "X-FILE: "';
const url = `http://${host}/create-build/${pr}/${sha9}`;
const bodyRegex = /^Missing or empty 'X-FILE' header/;
Promise.all([
h.runCmd(`curl -iL ${url}`).then(h.verifyResponse(400, bodyRegex)),
h.runCmd(`curl -iL ${headers} ${url}`).then(h.verifyResponse(400, bodyRegex)),
]).then(done);
});
it('should respond with 404 for unknown paths', done => {
const cmdPrefix = `${curl} http://${host}`;
Promise.all([
h.runCmd(`${cmdPrefix}/foo/create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/foo-create-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/fooncreate-build/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build-foo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-buildnfoo/${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/pr${pr}/${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${cmdPrefix}/create-build/${pr}/${sha9}42`).then(h.verifyResponse(404)),
]).then(done);
});
it('should reject PRs with leading zeros', done => {
h.runCmd(`${curl} http://${host}/create-build/0${pr}/${sha9}`).
then(h.verifyResponse(404)).
then(done);
});
it('should accept SHAs with leading zeros (but not ignore them)', done => {
Promise.all([
h.runCmd(`${curl} http://${host}/create-build/${pr}/0${sha9}`).then(h.verifyResponse(404)),
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha0}`).then(h.verifyResponse(500)),
]).then(done);
});
it('should not overwrite existing builds', done => {
h.createDummyBuild(pr, sha9);
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain('index.html');
h.writeBuildFile(pr, sha9, 'index.html', 'My content');
expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content');
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(403, /^Request to overwrite existing directory/)).
then(() => expect(h.readBuildFile(pr, sha9, 'index.html')).toBe('My content')).
then(done);
});
it('should delete the PR directory on error (for new PR)', done => {
const prDir = path.join(h.buildsDir, pr);
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(500)).
then(() => expect(fs.existsSync(prDir)).toBe(false)).
then(done);
});
it('should only delete the SHA directory on error (for existing PR)', done => {
const prDir = path.join(h.buildsDir, pr);
const shaDir = path.join(prDir, sha9);
h.createDummyBuild(pr, sha0);
h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`).
then(h.verifyResponse(500)).
then(() => {
expect(fs.existsSync(shaDir)).toBe(false);
expect(fs.existsSync(prDir)).toBe(true);
}).
then(done);
});
describe('on successful upload', () => {
const archivePath = path.join(h.buildsDir, 'snapshot.tar.gz');
let uploadPromise: Promise<CmdResult>;
beforeEach(() => {
h.createDummyArchive(pr, sha9, archivePath);
uploadPromise = h.runCmd(`${curl} http://${host}/create-build/${pr}/${sha9}`);
});
afterEach(() => h.deletePrDir(pr));
it('should respond with 201', done => {
uploadPromise.then(h.verifyResponse(201)).then(done);
});
it('should extract the contents of the uploaded file', done => {
uploadPromise.
then(() => {
expect(h.readBuildFile(pr, sha9, 'index.html')).toContain(`uploaded/${pr}`);
expect(h.readBuildFile(pr, sha9, 'foo/bar.js')).toContain(`uploaded/${pr}`);
}).
then(done);
});
it(`should create files/directories owned by '${h.serverUser}'`, done => {
const shaDir = path.join(h.buildsDir, pr, sha9);
const idxPath = path.join(shaDir, 'index.html');
const barPath = path.join(shaDir, 'foo', 'bar.js');
uploadPromise.
then(() => Promise.all([
h.runCmd(`find ${shaDir}`),
h.runCmd(`find ${shaDir} -user ${h.serverUser}`),
])).
then(([{stdout: allFiles}, {stdout: userFiles}]) => {
expect(userFiles).toBe(allFiles);
expect(userFiles).toContain(shaDir);
expect(userFiles).toContain(idxPath);
expect(userFiles).toContain(barPath);
}).
then(done);
});
it('should delete the uploaded file', done => {
expect(fs.existsSync(archivePath)).toBe(true);
uploadPromise.
then(() => expect(fs.existsSync(archivePath)).toBe(false)).
then(done);
});
it('should make the build directory non-writable', done => {
const shaDir = path.join(h.buildsDir, pr, sha9);
const idxPath = path.join(shaDir, 'index.html');
const barPath = path.join(shaDir, 'foo', 'bar.js');
// See https://github.com/nodejs/node-v0.x-archive/issues/3045#issuecomment-4862588.
const isNotWritable = (fileOrDir: string) => {
const mode = fs.statSync(fileOrDir).mode;
// tslint:disable-next-line: no-bitwise
return !(mode & parseInt('222', 8));
};
uploadPromise.
then(() => {
expect(isNotWritable(shaDir)).toBe(true);
expect(isNotWritable(idxPath)).toBe(true);
expect(isNotWritable(barPath)).toBe(true);
}).
then(done);
});
});
});
describe(`${host}/health-check`, () => {
it('should respond with 200', done => {
Promise.all([
h.runCmd(`curl -iL http://${host}/health-check`).then(h.verifyResponse(200)),
h.runCmd(`curl -iL http://${host}/health-check/`).then(h.verifyResponse(200)),
]).then(done);
});
it('should respond with 404 if the path does not match exactly', done => {
Promise.all([
h.runCmd(`curl -iL http://${host}/health-check/foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/health-check-foo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/health-checknfoo`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foo/health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foo-health-check`).then(h.verifyResponse(404)),
h.runCmd(`curl -iL http://${host}/foonhealth-check`).then(h.verifyResponse(404)),
]).then(done);
});
});
describe(`${host}/*`, () => {
it('should respond with 404 for GET requests to unknown URLs', done => {
const bodyRegex = /^Unknown resource/;
Promise.all([
h.runCmd(`curl -iL http://${host}/index.html`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL http://${host}/`).then(h.verifyResponse(404, bodyRegex)),
h.runCmd(`curl -iL http://${host}`).then(h.verifyResponse(404, bodyRegex)),
]).then(done);
});
it('should respond with 405 for non-GET requests to any URL', done => {
const bodyRegex = /^Unsupported method/;
Promise.all([
h.runCmd(`curl -iLX PUT http://${host}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX POST http://${host}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX PATCH http://${host}`).then(h.verifyResponse(405, bodyRegex)),
h.runCmd(`curl -iLX DELETE http://${host}`).then(h.verifyResponse(405, bodyRegex)),
]).then(done);
});
});
});

@ -0,0 +1,38 @@
{
"name": "aio-scripts-js",
"version": "1.0.0",
"description": "Performing various tasks on PR build artifacts for angular.io.",
"repository": "https://github.com/angular/angular.git",
"author": "Angular",
"license": "MIT",
"scripts": {
"prebuild": "yarn run clean",
"build": "tsc",
"build-watch": "yarn run tsc -- --watch",
"clean": "node --eval \"require('shelljs').rm('-rf', 'dist')\"",
"dev": "concurrently --kill-others --raw --success first \"yarn run build-watch\" \"yarn run test-watch\"",
"lint": "tslint --project tsconfig.json",
"pretest": "yarn run lint",
"test": "node dist/test",
"test-watch": "nodemon --exec \"yarn test\" --watch dist"
},
"dependencies": {
"express": "^4.14.1",
"jasmine": "^2.5.3",
"shelljs": "^0.7.6"
},
"devDependencies": {
"@types/express": "^4.0.35",
"@types/jasmine": "^2.5.43",
"@types/node": "^7.0.5",
"@types/shelljs": "^0.7.0",
"@types/supertest": "^2.0.0",
"concurrently": "^3.3.0",
"eslint": "^3.15.0",
"eslint-plugin-jasmine": "^2.2.0",
"nodemon": "^1.11.0",
"supertest": "^3.0.0",
"tslint": "^4.4.2",
"typescript": "^2.1.6"
}
}

@ -0,0 +1,310 @@
// Imports
import * as fs from 'fs';
import * as shell from 'shelljs';
import {BuildCleaner} from '../../lib/clean-up/build-cleaner';
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
// Tests
describe('BuildCleaner', () => {
let cleaner: BuildCleaner;
beforeEach(() => cleaner = new BuildCleaner('/foo/bar', 'baz/qux', '12345'));
describe('constructor()', () => {
it('should throw if \'buildsDir\' is empty', () => {
expect(() => new BuildCleaner('', '/baz/qux')).toThrowError('Missing required parameter \'buildsDir\'!');
});
it('should throw if \'repoSlug\' is empty', () => {
expect(() => new BuildCleaner('/foo/bar', '')).toThrowError('Missing required parameter \'repoSlug\'!');
});
});
describe('cleanUp()', () => {
let cleanerGetExistingBuildNumbersSpy: jasmine.Spy;
let cleanerGetOpenPrNumbersSpy: jasmine.Spy;
let cleanerRemoveUnnecessaryBuildsSpy: jasmine.Spy;
let existingBuildsDeferred: {resolve: Function, reject: Function};
let openPrsDeferred: {resolve: Function, reject: Function};
let promise: Promise<void>;
beforeEach(() => {
cleanerGetExistingBuildNumbersSpy = spyOn(cleaner as any, 'getExistingBuildNumbers').and.callFake(() => {
return new Promise((resolve, reject) => existingBuildsDeferred = {resolve, reject});
});
cleanerGetOpenPrNumbersSpy = spyOn(cleaner as any, 'getOpenPrNumbers').and.callFake(() => {
return new Promise((resolve, reject) => openPrsDeferred = {resolve, reject});
});
cleanerRemoveUnnecessaryBuildsSpy = spyOn(cleaner as any, 'removeUnnecessaryBuilds');
promise = cleaner.cleanUp();
});
it('should return a promise', () => {
expect(promise).toEqual(jasmine.any(Promise));
});
it('should get the existing builds', () => {
expect(cleanerGetExistingBuildNumbersSpy).toHaveBeenCalled();
});
it('should get the open PRs', () => {
expect(cleanerGetOpenPrNumbersSpy).toHaveBeenCalled();
});
it('should reject if \'getExistingBuildNumbers()\' rejects', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
existingBuildsDeferred.reject('Test');
});
it('should reject if \'getOpenPrNumbers()\' rejects', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
openPrsDeferred.reject('Test');
});
it('should reject if \'removeUnnecessaryBuilds()\' rejects', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.reject('Test'));
existingBuildsDeferred.resolve();
openPrsDeferred.resolve();
});
it('should pass existing builds and open PRs to \'removeUnnecessaryBuilds()\'', done => {
promise.then(() => {
expect(cleanerRemoveUnnecessaryBuildsSpy).toHaveBeenCalledWith('foo', 'bar');
done();
});
existingBuildsDeferred.resolve('foo');
openPrsDeferred.resolve('bar');
});
it('should resolve with the value returned by \'removeUnnecessaryBuilds()\'', done => {
promise.then(result => {
expect(result).toBe('Test');
done();
});
cleanerRemoveUnnecessaryBuildsSpy.and.returnValue(Promise.resolve('Test'));
existingBuildsDeferred.resolve();
openPrsDeferred.resolve();
});
});
// Protected methods
describe('getExistingBuildNumbers()', () => {
let fsReaddirSpy: jasmine.Spy;
let readdirCb: (err: any, files?: string[]) => void;
let promise: Promise<number[]>;
beforeEach(() => {
fsReaddirSpy = spyOn(fs, 'readdir').and.callFake((_: string, cb: typeof readdirCb) => readdirCb = cb);
promise = (cleaner as any).getExistingBuildNumbers();
});
it('should return a promise', () => {
expect(promise).toEqual(jasmine.any(Promise));
});
it('should get the contents of the builds directory', () => {
expect(fsReaddirSpy).toHaveBeenCalled();
expect(fsReaddirSpy.calls.argsFor(0)[0]).toBe('/foo/bar');
});
it('should reject if an error occurs while getting the files', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
readdirCb('Test');
});
it('should resolve with the returned files (as numbers)', done => {
promise.then(result => {
expect(result).toEqual([12, 34, 56]);
done();
});
readdirCb(null, ['12', '34', '56']);
});
it('should ignore files with non-numeric (or zero) names', done => {
promise.then(result => {
expect(result).toEqual([12, 34, 56]);
done();
});
readdirCb(null, ['12', 'foo', '34', 'bar', '56', '000']);
});
});
describe('getOpenPrNumbers()', () => {
let prDeferred: {resolve: Function, reject: Function};
let promise: Promise<number[]>;
beforeEach(() => {
spyOn(GithubPullRequests.prototype, 'fetchAll').and.callFake(() => {
return new Promise((resolve, reject) => prDeferred = {resolve, reject});
});
promise = (cleaner as any).getOpenPrNumbers();
});
it('should return a promise', () => {
expect(promise).toEqual(jasmine.any(Promise));
});
it('should fetch open PRs via \'GithubPullRequests\'', () => {
expect(GithubPullRequests.prototype.fetchAll).toHaveBeenCalledWith('open');
});
it('should reject if an error occurs while fetching PRs', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
prDeferred.reject('Test');
});
it('should resolve with the numbers of the fetched PRs', done => {
promise.then(prNumbers => {
expect(prNumbers).toEqual([1, 2, 3]);
done();
});
prDeferred.resolve([{id: 0, number: 1}, {id: 1, number: 2}, {id: 2, number: 3}]);
});
});
describe('removeDir()', () => {
let shellChmodSpy: jasmine.Spy;
let shellRmSpy: jasmine.Spy;
beforeEach(() => {
shellChmodSpy = spyOn(shell, 'chmod');
shellRmSpy = spyOn(shell, 'rm');
});
it('should remove the specified directory and its content', () => {
(cleaner as any).removeDir('/foo/bar');
expect(shellRmSpy).toHaveBeenCalledWith('-rf', '/foo/bar');
});
it('should make the directory and its content writable before removing', () => {
shellRmSpy.and.callFake(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a+w', '/foo/bar'));
(cleaner as any).removeDir('/foo/bar');
expect(shellRmSpy).toHaveBeenCalled();
});
it('should catch errors and log them', () => {
const consoleErrorSpy = spyOn(console, 'error');
shellRmSpy.and.callFake(() => { throw 'Test'; });
(cleaner as any).removeDir('/foo/bar');
expect(consoleErrorSpy).toHaveBeenCalled();
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain('Unable to remove \'/foo/bar\'');
expect(consoleErrorSpy.calls.argsFor(0)[1]).toBe('Test');
});
});
describe('removeUnnecessaryBuilds()', () => {
let consoleLogSpy: jasmine.Spy;
let cleanerRemoveDirSpy: jasmine.Spy;
beforeEach(() => {
consoleLogSpy = spyOn(console, 'log');
cleanerRemoveDirSpy = spyOn(cleaner as any, 'removeDir');
});
it('should log the number of existing builds, open PRs and builds to be removed', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], [3, 4, 5, 6]);
expect(console.log).toHaveBeenCalledWith('Existing builds: 3');
expect(console.log).toHaveBeenCalledWith('Open pull requests: 4');
expect(console.log).toHaveBeenCalledWith('Removing 2 build(s): 1, 2');
});
it('should construct full paths to directories (by prepending \'buildsDir\')', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3], []);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
});
it('should remove the builds that do not correspond to open PRs', () => {
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [2, 4]);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(2);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
cleanerRemoveDirSpy.calls.reset();
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], [1, 2, 3, 4]);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(0);
cleanerRemoveDirSpy.calls.reset();
(cleaner as any).removeUnnecessaryBuilds([1, 2, 3, 4], []);
expect(cleanerRemoveDirSpy).toHaveBeenCalledTimes(4);
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/1');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/2');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/3');
expect(cleanerRemoveDirSpy).toHaveBeenCalledWith('/foo/bar/4');
cleanerRemoveDirSpy.calls.reset();
});
});
});

@ -0,0 +1,350 @@
// Imports
import {EventEmitter} from 'events';
import {ClientRequest, IncomingMessage} from 'http';
import * as https from 'https';
import {GithubApi} from '../../lib/common/github-api';
// Tests
describe('GithubApi', () => {
let api: GithubApi;
beforeEach(() => api = new GithubApi('repo/slug', '12345'));
describe('constructor()', () => {
it('should throw if \'repoSlug\' is not defined', () => {
expect(() => new GithubApi('', '12345')).toThrowError('Missing required parameter \'repoSlug\'!');
});
it('should log a warning if \'githubToken\' is not defined or empty', () => {
const warningMessage = 'No GitHub access-token specified. Requests will be unauthenticated.';
const consoleWarnSpy = spyOn(console, 'warn');
/* tslint:disable: no-unused-new */
new GithubApi('repo/slug');
new GithubApi('repo/slug', '');
/* tslint:enable: no-unused-new */
expect(consoleWarnSpy).toHaveBeenCalledTimes(2);
expect(consoleWarnSpy.calls.argsFor(0)[0]).toBe(warningMessage);
expect(consoleWarnSpy.calls.argsFor(1)[0]).toBe(warningMessage);
});
});
describe('get()', () => {
let apiBuildPathSpy: jasmine.Spy;
let apiRequestSpy: jasmine.Spy;
beforeEach(() => {
apiBuildPathSpy = spyOn(api as any, 'buildPath');
apiRequestSpy = spyOn(api as any, 'request');
});
it('should call \'buildPath()\' with the pathname and params', () => {
api.get('/foo', {bar: 'baz'});
expect(apiBuildPathSpy).toHaveBeenCalled();
expect(apiBuildPathSpy.calls.argsFor(0)).toEqual(['/foo', {bar: 'baz'}]);
});
it('should call \'request()\' with the correct method', () => {
api.get('/foo');
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[0]).toBe('get');
});
it('should call \'request()\' with the correct path', () => {
apiBuildPathSpy.and.returnValue('/foo/bar');
api.get('foo');
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[1]).toBe('/foo/bar');
});
it('should not pass data to \'request()\'', () => {
(api.get as Function)('foo', {}, {});
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[2]).toBeUndefined();
});
});
describe('post()', () => {
let apiBuildPathSpy: jasmine.Spy;
let apiRequestSpy: jasmine.Spy;
beforeEach(() => {
apiBuildPathSpy = spyOn(api as any, 'buildPath');
apiRequestSpy = spyOn(api as any, 'request');
});
it('should call \'buildPath()\' with the pathname and params', () => {
api.post('/foo', {bar: 'baz'});
expect(apiBuildPathSpy).toHaveBeenCalled();
expect(apiBuildPathSpy.calls.argsFor(0)).toEqual(['/foo', {bar: 'baz'}]);
});
it('should call \'request()\' with the correct method', () => {
api.post('/foo');
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[0]).toBe('post');
});
it('should call \'request()\' with the correct path', () => {
apiBuildPathSpy.and.returnValue('/foo/bar');
api.post('/foo');
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[1]).toBe('/foo/bar');
});
it('should pass the data to \'request()\'', () => {
api.post('/foo', {}, {bar: 'baz'});
expect(apiRequestSpy).toHaveBeenCalled();
expect(apiRequestSpy.calls.argsFor(0)[2]).toEqual({bar: 'baz'});
});
});
// Protected methods
describe('buildPath()', () => {
it('should return the pathname if no params', () => {
expect((api as any).buildPath('/foo')).toBe('/foo');
expect((api as any).buildPath('/foo', undefined)).toBe('/foo');
expect((api as any).buildPath('/foo', null)).toBe('/foo');
});
it('should append the params to the pathname', () => {
expect((api as any).buildPath('/foo', {bar: 'baz'})).toBe('/foo?bar=baz');
});
it('should join the params with \'&\'', () => {
expect((api as any).buildPath('/foo', {bar: 1, baz: 2})).toBe('/foo?bar=1&baz=2');
});
it('should ignore undefined/null params', () => {
expect((api as any).buildPath('/foo', {bar: undefined, baz: null})).toBe('/foo');
});
it('should encode param values as URI components', () => {
expect((api as any).buildPath('/foo', {bar: 'b a&z'})).toBe('/foo?bar=b%20a%26z');
});
});
describe('request()', () => {
let httpsRequestSpy: jasmine.Spy;
let latestRequest: ClientRequest;
beforeEach(() => {
const originalRequest = https.request;
httpsRequestSpy = spyOn(https, 'request').and.callFake((...args: any[]) => {
latestRequest = originalRequest.apply(https, args);
spyOn(latestRequest, 'on').and.callThrough();
spyOn(latestRequest, 'end');
return latestRequest;
});
});
it('should return a promise', () => {
expect((api as any).request()).toEqual(jasmine.any(Promise));
});
it('should call \'https.request()\' with the correct options', () => {
(api as any).request('method', 'path');
expect(httpsRequestSpy).toHaveBeenCalled();
expect(httpsRequestSpy.calls.argsFor(0)[0]).toEqual(jasmine.objectContaining({
headers: jasmine.objectContaining({
'User-Agent': `Node/${process.versions.node}`,
}),
host: 'api.github.com',
method: 'method',
path: 'path',
}));
});
it('should specify an \'Authorization\' header if \'githubToken\' is present', () => {
(api as any).request('method', 'path');
expect(httpsRequestSpy).toHaveBeenCalled();
expect(httpsRequestSpy.calls.argsFor(0)[0].headers).toEqual(jasmine.objectContaining({
Authorization: 'token 12345',
}));
});
it('should reject on request error', done => {
(api as any).request('method', 'path').catch((err: any) => {
expect(err).toBe('Test');
done();
});
latestRequest.emit('error', 'Test');
});
it('should send the request (i.e. call \'end()\')', () => {
(api as any).request('method', 'path');
expect(latestRequest.end).toHaveBeenCalled();
});
it('should \'JSON.stringify\' and send the data along with the request', () => {
(api as any).request('method', 'path');
expect(latestRequest.end).toHaveBeenCalledWith(null);
(api as any).request('method', 'path', {key: 'value'});
expect(latestRequest.end).toHaveBeenCalledWith('{"key":"value"}');
});
describe('onResponse', () => {
let promise: Promise<void>;
let respond: (statusCode: number) => IncomingMessage;
beforeEach(() => {
promise = (api as any).request('method', 'path');
respond = (statusCode: number) => {
const mockResponse = new EventEmitter() as IncomingMessage;
mockResponse.statusCode = statusCode;
const onResponse = httpsRequestSpy.calls.argsFor(0)[1];
onResponse(mockResponse);
return mockResponse;
};
});
it('should reject on response error', done => {
promise.catch(err => {
expect(err).toBe('Test');
done();
});
const res = respond(200);
res.emit('error', 'Test');
});
it('should reject if returned statusCode is <200', done => {
promise.catch(err => {
expect(err).toContain('failed');
expect(err).toContain('status: 199');
done();
});
const res = respond(199);
res.emit('end');
});
it('should reject if returned statusCode is >=400', done => {
promise.catch(err => {
expect(err).toContain('failed');
expect(err).toContain('status: 400');
done();
});
const res = respond(400);
res.emit('end');
});
it('should include the response text in the rejection message', done => {
promise.catch(err => {
expect(err).toContain('Test');
done();
});
const res = respond(500);
res.emit('data', 'Test');
res.emit('end');
});
it('should resolve if returned statusCode is >=200 and <400', done => {
promise.then(done);
const res = respond(200);
res.emit('data', '{}');
res.emit('end');
});
it('should resolve with the response text \'JSON.parsed\'', done => {
promise.then(data => {
expect(data).toEqual({foo: 'bar'});
done();
});
const res = respond(300);
res.emit('data', '{"foo":"bar"}');
res.emit('end');
});
it('should collect and concatenate the whole response text', done => {
promise.then(data => {
expect(data).toEqual({foo: 'bar', baz: 'qux'});
done();
});
const res = respond(300);
res.emit('data', '{"foo":');
res.emit('data', '"bar","baz"');
res.emit('data', ':"qux"}');
res.emit('end');
});
it('should reject if the response text is malformed JSON', done => {
promise.catch(err => {
expect(err).toEqual(jasmine.any(SyntaxError));
done();
});
const res = respond(300);
res.emit('data', '}');
res.emit('end');
});
});
});
});
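
The `buildPath()` specs above translate almost directly into code. A possible sketch (assumed, not the actual implementation):

```ts
// Sketch of `buildPath()` consistent with the expectations above; assumed, not the actual source.
function buildPath(pathname: string, params?: {[key: string]: any} | null): string {
  if (!params) {
    return pathname;
  }

  const search = Object.keys(params).
    filter(key => (params[key] !== undefined) && (params[key] !== null)).   // Ignore undefined/null params.
    map(key => `${key}=${encodeURIComponent(String(params[key]))}`).        // Encode values as URI components.
    join('&');                                                              // Join the params with '&'.

  return search ? `${pathname}?${search}` : pathname;
}

// E.g. buildPath('/foo', {bar: 'b a&z'}) === '/foo?bar=b%20a%26z'
```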

View File

@ -0,0 +1,172 @@
// Imports
import {GithubPullRequests} from '../../lib/common/github-pull-requests';
// Tests
describe('GithubPullRequests', () => {
describe('constructor()', () => {
it('should log a warning if \'githubToken\' is not defined', () => {
const warningMessage = 'No GitHub access-token specified. Requests will be unauthenticated.';
const consoleWarnSpy = spyOn(console, 'warn');
// tslint:disable-next-line: no-unused-new
new GithubPullRequests('repo/slug');
expect(consoleWarnSpy).toHaveBeenCalledWith(warningMessage);
});
it('should throw if \'repoSlug\' is not defined', () => {
expect(() => new GithubPullRequests('', '12345')).toThrowError('Missing required parameter \'repoSlug\'!');
});
});
describe('addComment()', () => {
let prs: GithubPullRequests;
let deferred: {resolve: Function, reject: Function};
beforeEach(() => {
prs = new GithubPullRequests('foo/bar', '12345');
spyOn(prs, 'post').and.callFake(() => new Promise((resolve, reject) => deferred = {resolve, reject}));
});
it('should return a promise', () => {
expect(prs.addComment(42, 'body')).toEqual(jasmine.any(Promise));
});
it('should throw if the PR number is invalid', () => {
expect(() => prs.addComment(-1337, 'body')).toThrowError(`Invalid PR number: -1337`);
expect(() => prs.addComment(NaN, 'body')).toThrowError(`Invalid PR number: NaN`);
});
it('should throw if the comment body is invalid or empty', () => {
expect(() => prs.addComment(42, '')).toThrowError(`Invalid or empty comment body: `);
});
it('should call \'post()\' with the correct pathname, params and data', () => {
prs.addComment(42, 'body');
expect(prs.post).toHaveBeenCalledWith('/repos/foo/bar/issues/42/comments', null, {body: 'body'});
});
it('should reject if the request fails', done => {
prs.addComment(42, 'body').catch(err => {
expect(err).toBe('Test');
done();
});
deferred.reject('Test');
});
it('should resolve with the returned response', done => {
prs.addComment(42, 'body').then(data => {
expect(data).toEqual('Test');
done();
});
deferred.resolve('Test');
});
});
describe('fetchAll()', () => {
let prs: GithubPullRequests;
let deferreds: {resolve: Function, reject: Function}[];
beforeEach(() => {
prs = new GithubPullRequests('foo/bar', '12345');
deferreds = [];
spyOn(process.stdout, 'write');
spyOn(prs, 'get').and.callFake(() => new Promise((resolve, reject) => deferreds.push({resolve, reject})));
});
it('should return a promise', () => {
expect(prs.fetchAll()).toEqual(jasmine.any(Promise));
});
it('should call \'get()\' with the correct pathname and params', () => {
prs.fetchAll('open');
expect(prs.get).toHaveBeenCalledWith('/repos/foo/bar/pulls', {
page: 0,
per_page: 100,
state: 'open',
});
});
it('should default to \'all\' if no state is specified', () => {
prs.fetchAll();
expect(prs.get).toHaveBeenCalledWith('/repos/foo/bar/pulls', {
page: 0,
per_page: 100,
state: 'all',
});
});
it('should reject if the request fails', done => {
prs.fetchAll().catch(err => {
expect(err).toBe('Test');
done();
});
deferreds[0].reject('Test');
});
it('should resolve with the returned pull requests', done => {
const pullRequests = [{id: 1}, {id: 2}];
prs.fetchAll().then(data => {
expect(data).toEqual(pullRequests);
done();
});
deferreds[0].resolve(pullRequests);
});
it('should iteratively call \'get()\' to fetch all pull requests', done => {
// Create an array of 250 objects.
const allPullRequests = '.'.repeat(250).split('').map((_, i) => ({id: i}));
const prsGetSpy = prs.get as jasmine.Spy;
prs.fetchAll().then(data => {
const paramsForPage = (page: number) => ({page, per_page: 100, state: 'all'});
expect(prsGetSpy).toHaveBeenCalledTimes(3);
expect(prsGetSpy.calls.argsFor(0)).toEqual(['/repos/foo/bar/pulls', paramsForPage(0)]);
expect(prsGetSpy.calls.argsFor(1)).toEqual(['/repos/foo/bar/pulls', paramsForPage(1)]);
expect(prsGetSpy.calls.argsFor(2)).toEqual(['/repos/foo/bar/pulls', paramsForPage(2)]);
expect(data).toEqual(allPullRequests);
done();
});
deferreds[0].resolve(allPullRequests.slice(0, 100));
setTimeout(() => {
deferreds[1].resolve(allPullRequests.slice(100, 200));
setTimeout(() => deferreds[2].resolve(allPullRequests.slice(200)), 0);
}, 0);
});
});
});
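
The last spec above pins down the pagination strategy for `fetchAll()`: request pages of 100 and keep going until a page comes back with fewer than 100 items. A sketch of that loop (the helper shape and names are assumptions):

```ts
// Pagination sketch matching the `fetchAll()` specs above; assumed shape, not the actual source.
type GetFn = (pathname: string, params: {[key: string]: any}) => Promise<any[]>;

function fetchAllSketch(get: GetFn, repoSlug: string, state = 'all'): Promise<any[]> {
  const perPage = 100;

  const fetchPage = (page: number): Promise<any[]> =>
    get(`/repos/${repoSlug}/pulls`, {page, per_page: perPage, state}).
      then(items =>
        // A full page may mean there are more results; fetch the next page and concatenate.
        (items.length < perPage) ? items : fetchPage(page + 1).then(rest => items.concat(rest)));

  return fetchPage(0);
}

// E.g. with 250 open PRs this issues three requests, for pages 0, 1 and 2.
```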

View File

@ -0,0 +1,62 @@
// Imports
import {getEnvVar} from '../../lib/common/utils';
// Tests
describe('utils', () => {
describe('getEnvVar()', () => {
const emptyVar = '$$test_utils_getEnvVar_empty$$';
const nonEmptyVar = '$$test_utils_getEnvVar_nonEmpty$$';
const undefinedVar = '$$test_utils_getEnvVar_undefined$$';
beforeEach(() => {
process.env[emptyVar] = '';
process.env[nonEmptyVar] = 'foo';
});
afterEach(() => {
delete process.env[emptyVar];
delete process.env[nonEmptyVar];
});
it('should return an environment variable', () => {
expect(getEnvVar(nonEmptyVar)).toBe('foo');
});
it('should exit with an error if the environment variable is not defined', () => {
const consoleErrorSpy = spyOn(console, 'error');
const processExitSpy = spyOn(process, 'exit');
getEnvVar(undefinedVar);
expect(consoleErrorSpy).toHaveBeenCalled();
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain(undefinedVar);
expect(processExitSpy).toHaveBeenCalledWith(1);
});
it('should exit with an error if the environment variable is empty', () => {
const consoleErrorSpy = spyOn(console, 'error');
const processExitSpy = spyOn(process, 'exit');
getEnvVar(emptyVar);
expect(consoleErrorSpy).toHaveBeenCalled();
expect(consoleErrorSpy.calls.argsFor(0)[0]).toContain(emptyVar);
expect(processExitSpy).toHaveBeenCalledWith(1);
});
it('should return an empty string if an undefined variable is optional', () => {
expect(getEnvVar(undefinedVar, true)).toBe('');
});
it('should return an empty string if an empty variable is optional', () => {
expect(getEnvVar(emptyVar, true)).toBe('');
});
});
});
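
A sketch of `getEnvVar()` that satisfies the specs above (the exact error message is an assumption):

```ts
// Sketch only; the exact error message is an assumption.
export const getEnvVarSketch = (name: string, isOptional = false): string => {
  const value = process.env[name];

  if (!value && !isOptional) {
    // Missing or empty non-optional variables are treated as fatal.
    console.error(`ERROR: Missing required environment variable '${name}'!`);
    process.exit(1);
  }

  return value || '';
};
```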

View File

@ -0,0 +1,6 @@
declare namespace jasmine {
export interface DoneFn extends Function {
(): void;
fail: (message: Error | string) => void;
}
}

View File

@ -0,0 +1,7 @@
// Imports
import {runTests} from '../lib/common/run-tests';
// Run
const specFiles = [`${__dirname}/**/*.spec.js`];
const helpers = [`${__dirname}/helpers.js`];
runTests(specFiles, helpers);

View File

@ -0,0 +1,330 @@
// Imports
import * as cp from 'child_process';
import {EventEmitter} from 'events';
import * as fs from 'fs';
import * as shell from 'shelljs';
import {BuildCreator} from '../../lib/upload-server/build-creator';
import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
import {UploadError} from '../../lib/upload-server/upload-error';
// Tests
describe('BuildCreator', () => {
const pr = '9';
const sha = '9'.repeat(40);
const archive = 'snapshot.tar.gz';
const buildsDir = 'build/dir';
const prDir = `${buildsDir}/${pr}`;
const shaDir = `${prDir}/${sha}`;
let bc: BuildCreator;
// Helpers
const expectToBeUploadError = (actual: UploadError, expStatus?: number, expMessage?: string) => {
expect(actual).toEqual(jasmine.any(UploadError));
if (expStatus != null) {
expect(actual.status).toBe(expStatus);
}
if (expMessage != null) {
expect(actual.message).toBe(expMessage);
}
};
beforeEach(() => bc = new BuildCreator(buildsDir));
describe('constructor()', () => {
it('should throw if \'buildsDir\' is missing or empty', () => {
expect(() => new BuildCreator('')).toThrowError('Missing or empty required parameter \'buildsDir\'!');
});
it('should extend EventEmitter', () => {
expect(bc).toEqual(jasmine.any(BuildCreator));
expect(bc).toEqual(jasmine.any(EventEmitter));
expect(Object.getPrototypeOf(bc)).toBe(BuildCreator.prototype);
});
});
describe('create()', () => {
let bcEmitSpy: jasmine.Spy;
let bcExistsSpy: jasmine.Spy;
let bcExtractArchiveSpy: jasmine.Spy;
let shellMkdirSpy: jasmine.Spy;
let shellRmSpy: jasmine.Spy;
beforeEach(() => {
bcEmitSpy = spyOn(bc, 'emit');
bcExistsSpy = spyOn(bc as any, 'exists');
bcExtractArchiveSpy = spyOn(bc as any, 'extractArchive');
shellMkdirSpy = spyOn(shell, 'mkdir');
shellRmSpy = spyOn(shell, 'rm');
});
it('should return a promise', done => {
const promise = bc.create(pr, sha, archive);
promise.then(done); // Do not complete the test (and release the spies) synchronously
// to avoid running the actual `extractArchive()`.
expect(promise).toEqual(jasmine.any(Promise));
});
it('should throw if the build already exists', done => {
bcExistsSpy.and.returnValue(true);
bc.create(pr, sha, archive).catch(err => {
expectToBeUploadError(err, 403, `Request to overwrite existing directory: ${shaDir}`);
done();
});
});
it('should create the build directory (and any parent directories)', done => {
bc.create(pr, sha, archive).
then(() => expect(shellMkdirSpy).toHaveBeenCalledWith('-p', shaDir)).
then(done);
});
it('should extract the archive contents into the build directory', done => {
bc.create(pr, sha, archive).
then(() => expect(bcExtractArchiveSpy).toHaveBeenCalledWith(archive, shaDir)).
then(done);
});
it('should emit a CreatedBuildEvent on success', done => {
let emitted = false;
bcEmitSpy.and.callFake((type: string, evt: CreatedBuildEvent) => {
expect(type).toBe(CreatedBuildEvent.type);
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
expect(evt.pr).toBe(+pr);
expect(evt.sha).toBe(sha);
emitted = true;
});
bc.create(pr, sha, archive).
then(() => expect(emitted).toBe(true)).
then(done);
});
describe('on error', () => {
it('should abort and skip further operations if it fails to create the directories', done => {
shellMkdirSpy.and.throwError('');
bc.create(pr, sha, archive).catch(() => {
expect(shellMkdirSpy).toHaveBeenCalled();
expect(bcExtractArchiveSpy).not.toHaveBeenCalled();
expect(bcEmitSpy).not.toHaveBeenCalled();
done();
});
});
it('should abort and skip further operations if it fails to extract the archive', done => {
bcExtractArchiveSpy.and.throwError('');
bc.create(pr, sha, archive).catch(() => {
expect(shellMkdirSpy).toHaveBeenCalled();
expect(bcExtractArchiveSpy).toHaveBeenCalled();
expect(bcEmitSpy).not.toHaveBeenCalled();
done();
});
});
it('should delete the PR directory (for new PR)', done => {
bcExtractArchiveSpy.and.throwError('');
bc.create(pr, sha, archive).catch(() => {
expect(shellRmSpy).toHaveBeenCalledWith('-rf', prDir);
done();
});
});
it('should delete the SHA directory (for existing PR)', done => {
bcExistsSpy.and.callFake((path: string) => path !== shaDir);
bcExtractArchiveSpy.and.throwError('');
bc.create(pr, sha, archive).catch(() => {
expect(shellRmSpy).toHaveBeenCalledWith('-rf', shaDir);
done();
});
});
it('should reject with an UploadError', done => {
shellMkdirSpy.and.callFake(() => {throw 'Test'; });
bc.create(pr, sha, archive).catch(err => {
expectToBeUploadError(err, 500, `Error while uploading to directory: ${shaDir}\nTest`);
done();
});
});
it('should pass UploadError instances unmodified', done => {
shellMkdirSpy.and.callFake(() => { throw new UploadError(543, 'Test'); });
bc.create(pr, sha, archive).catch(err => {
expectToBeUploadError(err, 543, 'Test');
done();
});
});
});
});
// Protected methods
describe('exists()', () => {
let fsAccessSpy: jasmine.Spy;
let fsAccessCbs: Function[];
beforeEach(() => {
fsAccessCbs = [];
fsAccessSpy = spyOn(fs, 'access').and.callFake((_: string, cb: Function) => fsAccessCbs.push(cb));
});
it('should return a promise', () => {
expect((bc as any).exists('foo')).toEqual(jasmine.any(Promise));
});
it('should call \'fs.access()\' with the specified argument', () => {
(bc as any).exists('foo');
expect(fs.access).toHaveBeenCalledWith('foo', jasmine.any(Function));
});
it('should resolve with \'true\' if \'fs.access()\' succeeds', done => {
Promise.
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
then(results => expect(results).toEqual([true, true])).
then(done);
fsAccessCbs[0]();
fsAccessCbs[1](null);
});
it('should resolve with \'false\' if \'fs.access()\' errors', done => {
Promise.
all([(bc as any).exists('foo'), (bc as any).exists('bar')]).
then(results => expect(results).toEqual([false, false])).
then(done);
fsAccessCbs[0]('Error');
fsAccessCbs[1](new Error());
});
});
describe('extractArchive()', () => {
let consoleWarnSpy: jasmine.Spy;
let shellChmodSpy: jasmine.Spy;
let shellRmSpy: jasmine.Spy;
let cpExecSpy: jasmine.Spy;
let cpExecCbs: Function[];
beforeEach(() => {
cpExecCbs = [];
consoleWarnSpy = spyOn(console, 'warn');
shellChmodSpy = spyOn(shell, 'chmod');
shellRmSpy = spyOn(shell, 'rm');
cpExecSpy = spyOn(cp, 'exec').and.callFake((_: string, cb: Function) => cpExecCbs.push(cb));
});
it('should return a promise', () => {
expect((bc as any).extractArchive('foo', 'bar')).toEqual(jasmine.any(Promise));
});
it('should "gunzip" and "untar" the input file into the output directory', () => {
const cmd = 'tar --extract --gzip --directory "output/dir" --file "input/file"';
(bc as any).extractArchive('input/file', 'output/dir');
expect(cpExecSpy).toHaveBeenCalledWith(cmd, jasmine.any(Function));
});
it('should log (as a warning) any stderr output if extracting succeeded', done => {
(bc as any).extractArchive('foo', 'bar').
then(() => expect(consoleWarnSpy).toHaveBeenCalledWith('This is stderr')).
then(done);
cpExecCbs[0](null, 'This is stdout', 'This is stderr');
});
it('should make the build directory non-writable', done => {
(bc as any).extractArchive('foo', 'bar').
then(() => expect(shellChmodSpy).toHaveBeenCalledWith('-R', 'a-w', 'bar')).
then(done);
cpExecCbs[0]();
});
it('should delete the uploaded file on success', done => {
(bc as any).extractArchive('input/file', 'output/dir').
then(() => expect(shellRmSpy).toHaveBeenCalledWith('-f', 'input/file')).
then(done);
cpExecCbs[0]();
});
describe('on error', () => {
it('should abort and skip further operations if it fails to extract the archive', done => {
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
expect(shellChmodSpy).not.toHaveBeenCalled();
expect(shellRmSpy).not.toHaveBeenCalled();
expect(err).toBe('Test');
done();
});
cpExecCbs[0]('Test');
});
it('should abort and skip further operations if it fails to make non-writable', done => {
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
expect(shellChmodSpy).toHaveBeenCalled();
expect(shellRmSpy).not.toHaveBeenCalled();
expect(err).toBe('Test');
done();
});
shellChmodSpy.and.callFake(() => { throw 'Test'; });
cpExecCbs[0]();
});
it('should abort and reject if it fails to remove the uploaded file', done => {
(bc as any).extractArchive('foo', 'bar').catch((err: any) => {
expect(shellChmodSpy).toHaveBeenCalled();
expect(shellRmSpy).toHaveBeenCalled();
expect(err).toBe('Test');
done();
});
shellRmSpy.and.callFake(() => { throw 'Test'; });
cpExecCbs[0]();
});
});
});
});
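
The `extractArchive()` specs above describe the whole flow: shell out to `tar`, log any stderr output as a warning, make the extracted build non-writable and delete the uploaded archive. A sketch under those assumptions (not the actual source):

```ts
// Sketch of `extractArchive()` as described by the specs above; assumed, not the actual source.
import * as cp from 'child_process';
import * as shell from 'shelljs';

function extractArchiveSketch(inputFile: string, outputDir: string): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    const cmd = `tar --extract --gzip --directory "${outputDir}" --file "${inputFile}"`;

    cp.exec(cmd, (err, _stdout, stderr) => {
      if (err) {
        return reject(err);
      }

      try {
        if (stderr) {
          console.warn(stderr);                 // Log any stderr output as a warning.
        }
        shell.chmod('-R', 'a-w', outputDir);    // Make the build directory non-writable.
        shell.rm('-f', inputFile);              // Delete the uploaded archive on success.
        resolve();
      } catch (e) {
        reject(e);
      }
    });
  });
}
```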

View File

@ -0,0 +1,61 @@
// Imports
import {BuildEvent, CreatedBuildEvent} from '../../lib/upload-server/build-events';
// Tests
describe('BuildEvent', () => {
let evt: BuildEvent;
beforeEach(() => evt = new BuildEvent('foo', 42, 'bar'));
it('should have a \'type\' property', () => {
expect(evt.type).toBe('foo');
});
it('should have a \'pr\' property', () => {
expect(evt.pr).toBe(42);
});
it('should have a \'sha\' property', () => {
expect(evt.sha).toBe('bar');
});
});
describe('CreatedBuildEvent', () => {
let evt: CreatedBuildEvent;
beforeEach(() => evt = new CreatedBuildEvent(42, 'bar'));
it('should have a static \'type\' property', () => {
expect(CreatedBuildEvent.type).toBe('build.created');
});
it('should extend BuildEvent', () => {
expect(evt).toEqual(jasmine.any(CreatedBuildEvent));
expect(evt).toEqual(jasmine.any(BuildEvent));
expect(Object.getPrototypeOf(evt)).toBe(CreatedBuildEvent.prototype);
});
it('should automatically set the \'type\'', () => {
expect(evt.type).toBe(CreatedBuildEvent.type);
});
it('should have a \'pr\' property', () => {
expect(evt.pr).toBe(42);
});
it('should have a \'sha\' property', () => {
expect(evt.sha).toBe('bar');
});
});
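
The two event classes implied by these specs are tiny. A sketch (the `Sketch` suffix marks these as illustrative, not the actual classes):

```ts
// Sketch of the event classes implied by the specs above; assumed, not the actual source.
class BuildEventSketch {
  constructor(public type: string, public pr: number, public sha: string) {}
}

class CreatedBuildEventSketch extends BuildEventSketch {
  static type = 'build.created';

  constructor(pr: number, sha: string) {
    super(CreatedBuildEventSketch.type, pr, sha);   // The 'type' is set automatically.
  }
}
```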

View File

@ -0,0 +1,39 @@
// Imports
import {UploadError} from '../../lib/upload-server/upload-error';
// Tests
describe('UploadError', () => {
let err: UploadError;
beforeEach(() => err = new UploadError(999, 'message'));
it('should extend Error', () => {
expect(err).toEqual(jasmine.any(UploadError));
expect(err).toEqual(jasmine.any(Error));
expect(Object.getPrototypeOf(err)).toBe(UploadError.prototype);
});
it('should have a \'status\' property', () => {
expect(err.status).toBe(999);
});
it('should have a \'message\' property', () => {
expect(err.message).toBe('message');
});
it('should have a 500 \'status\' by default', () => {
expect(new UploadError().status).toBe(500);
});
it('should have an empty \'message\' by default', () => {
expect(new UploadError().message).toBe('');
expect(new UploadError(999).message).toBe('');
});
});
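
One detail worth noting: with the `es5` compile target used by this package (see the `tsconfig.json` below), a class extending `Error` must restore its prototype for the `Object.getPrototypeOf(err)` expectation above to hold. A sketch of `UploadError` under that assumption (the actual class may differ):

```ts
// Sketch only; the actual class may differ.
class UploadErrorSketch extends Error {
  // Defaults: status 500 and an empty message.
  constructor(public status: number = 500, message: string = '') {
    super(message);
    // With an ES5 target, restore the prototype chain so `instanceof` checks keep working.
    Object.setPrototypeOf(this, UploadErrorSketch.prototype);
  }
}
```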

View File

@ -0,0 +1,262 @@
// Imports
import * as express from 'express';
import * as http from 'http';
import * as supertest from 'supertest';
import {BuildCreator} from '../../lib/upload-server/build-creator';
import {CreatedBuildEvent} from '../../lib/upload-server/build-events';
import {uploadServerFactory as usf} from '../../lib/upload-server/upload-server-factory';
// Tests
describe('uploadServerFactory', () => {
describe('create()', () => {
let usfCreateMiddlewareSpy: jasmine.Spy;
beforeEach(() => {
usfCreateMiddlewareSpy = spyOn(usf as any, 'createMiddleware').and.callThrough();
});
it('should throw if \'buildsDir\' is empty', () => {
expect(() => usf.create('')).toThrowError('Missing or empty required parameter \'buildsDir\'!');
});
it('should return an http.Server', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
const server = usf.create('builds/dir');
expect(server).toBe(httpCreateServerSpy.calls.mostRecent().returnValue);
});
it('should create and use an appropriate middleware', () => {
const httpCreateServerSpy = spyOn(http, 'createServer').and.callThrough();
usf.create('builds/dir');
const middleware: express.Express = usfCreateMiddlewareSpy.calls.mostRecent().returnValue;
expect(usfCreateMiddlewareSpy).toHaveBeenCalledWith(jasmine.any(BuildCreator));
expect(httpCreateServerSpy).toHaveBeenCalledWith(middleware);
});
it('should pass \'buildsDir\' to the created BuildCreator', () => {
usf.create('builds/dir');
const buildCreator: BuildCreator = usfCreateMiddlewareSpy.calls.argsFor(0)[0];
expect((buildCreator as any).buildsDir).toBe('builds/dir');
});
it('should pass CreatedBuildEvents emitted on BuildCreator through to the server', done => {
const server = usf.create('builds/dir');
const buildCreator: BuildCreator = usfCreateMiddlewareSpy.calls.argsFor(0)[0];
const evt = new CreatedBuildEvent(42, 'foo');
server.on(CreatedBuildEvent.type, (data: CreatedBuildEvent) => {
expect(data).toBe(evt);
done();
});
buildCreator.emit(CreatedBuildEvent.type, evt);
});
it('should log the server address info on \'listening\'', () => {
const consoleInfoSpy = spyOn(console, 'info');
const server = usf.create('builds/dir');
server.address = () => ({address: 'foo', family: '', port: 1337});
expect(consoleInfoSpy).not.toHaveBeenCalled();
server.emit('listening');
expect(consoleInfoSpy).toHaveBeenCalledWith('Up and running (and listening on foo:1337)...');
});
});
// Protected methods
describe('createMiddleware()', () => {
let buildCreator: BuildCreator;
let agent: supertest.SuperTest<supertest.Test>;
// Helpers
const promisifyRequest = (req: supertest.Request) =>
new Promise((resolve, reject) => req.end(err => err ? reject(err) : resolve()));
const verifyRequests = (reqs: supertest.Request[], done: jasmine.DoneFn) =>
Promise.all(reqs.map(promisifyRequest)).then(done, done.fail);
beforeEach(() => {
buildCreator = new BuildCreator('builds/dir');
agent = supertest.agent((usf as any).createMiddleware(buildCreator));
spyOn(console, 'error');
});
describe('GET /create-build/<pr>/<sha>', () => {
const pr = '9';
const sha = '9'.repeat(40);
let buildCreatorCreateSpy: jasmine.Spy;
let deferred: {resolve: Function, reject: Function};
beforeEach(() => {
const promise = new Promise((resolve, reject) => deferred = {resolve, reject});
promise.catch(() => null); // Avoid "unhandled rejection" warnings.
buildCreatorCreateSpy = spyOn(buildCreator, 'create').and.returnValue(promise);
});
it('should respond with 405 for non-GET requests', done => {
verifyRequests([
agent.put(`/create-build/${pr}/${sha}`).expect(405),
agent.post(`/create-build/${pr}/${sha}`).expect(405),
agent.patch(`/create-build/${pr}/${sha}`).expect(405),
agent.delete(`/create-build/${pr}/${sha}`).expect(405),
], done);
});
it('should respond with 400 for requests without an \'X-FILE\' header', done => {
const url = `/create-build/${pr}/${sha}`;
const responseBody = `Missing or empty 'X-FILE' header in request: GET ${url}`;
verifyRequests([
agent.get(url).expect(400, responseBody),
agent.get(url).field('X-FILE', '').expect(400, responseBody),
], done);
});
it('should respond with 404 for unknown paths', done => {
verifyRequests([
agent.get(`/foo/create-build/${pr}/${sha}`).expect(404),
agent.get(`/foo-create-build/${pr}/${sha}`).expect(404),
agent.get(`/fooncreate-build/${pr}/${sha}`).expect(404),
agent.get(`/create-build/foo/${pr}/${sha}`).expect(404),
agent.get(`/create-build-foo/${pr}/${sha}`).expect(404),
agent.get(`/create-buildnfoo/${pr}/${sha}`).expect(404),
agent.get(`/create-build/pr${pr}/${sha}`).expect(404),
agent.get(`/create-build/${pr}/${sha}42`).expect(404),
], done);
});
it('should propagate errors from BuildCreator', done => {
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('X-FILE', 'foo').
expect(500, 'Test');
verifyRequests([req], done);
deferred.reject('Test');
});
it('should respond with 201 on successful upload', done => {
const req = agent.
get(`/create-build/${pr}/${sha}`).
set('X-FILE', 'foo').
expect(201, http.STATUS_CODES[201]);
verifyRequests([req], done);
deferred.resolve();
});
it('should call \'BuildCreator#create()\' with appropriate arguments', done => {
promisifyRequest(agent.get(`/create-build/${pr}/${sha}`).set('X-FILE', 'foo').expect(201)).
then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha, 'foo')).
then(done, done.fail);
deferred.resolve();
});
it('should reject PRs with leading zeros', done => {
verifyRequests([agent.get(`/create-build/0${pr}/${sha}`).expect(404)], done);
});
it('should accept SHAs with leading zeros (but not ignore them)', done => {
const sha40 = '0'.repeat(40);
const sha41 = `0${sha40}`;
Promise.all([
promisifyRequest(agent.get(`/create-build/${pr}/${sha41}`).expect(404)),
promisifyRequest(agent.get(`/create-build/${pr}/${sha40}`).set('X-FILE', 'foo')).
then(() => expect(buildCreatorCreateSpy).toHaveBeenCalledWith(pr, sha40, 'foo')),
]).then(done, done.fail);
deferred.resolve();
});
});
describe('GET /health-check', () => {
it('should respond with 200', done => {
verifyRequests([
agent.get('/health-check').expect(200),
agent.get('/health-check/').expect(200),
], done);
});
it('should respond with 405 for non-GET requests', done => {
verifyRequests([
agent.put('/health-check').expect(405),
agent.post('/health-check').expect(405),
agent.patch('/health-check').expect(405),
agent.delete('/health-check').expect(405),
], done);
});
it('should respond with 404 if the path does not match exactly', done => {
verifyRequests([
agent.get('/health-check/foo').expect(404),
agent.get('/health-check-foo').expect(404),
agent.get('/health-checknfoo').expect(404),
agent.get('/foo/health-check').expect(404),
agent.get('/foo-health-check').expect(404),
agent.get('/foonhealth-check').expect(404),
], done);
});
});
describe('GET *', () => {
it('should respond with 404', done => {
const responseBody = 'Unknown resource in request: GET /some/url';
verifyRequests([agent.get('/some/url').expect(404, responseBody)], done);
});
});
describe('ALL *', () => {
it('should respond with 405', done => {
const responseFor = (method: string) => `Unsupported method in request: ${method.toUpperCase()} /some/url`;
verifyRequests([
agent.put('/some/url').expect(405, responseFor('put')),
agent.post('/some/url').expect(405, responseFor('post')),
agent.patch('/some/url').expect(405, responseFor('patch')),
agent.delete('/some/url').expect(405, responseFor('delete')),
], done);
});
});
});
});
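
The 404 expectations above imply strict matching for the upload route: the PR number may not have leading zeros and the SHA must be exactly 40 hex characters. A pattern capturing those rules (an assumption; the actual middleware may express the route differently):

```ts
// Assumed pattern, derived from the 404/201 expectations above.
const CREATE_BUILD_RE = /^\/create-build\/([1-9][0-9]*)\/([0-9a-f]{40})$/;

const sha40 = '9'.repeat(40);
console.log(CREATE_BUILD_RE.test(`/create-build/9/${sha40}`));    // true
console.log(CREATE_BUILD_RE.test(`/create-build/09/${sha40}`));   // false (leading zero in the PR number)
console.log(CREATE_BUILD_RE.test(`/create-build/9/${sha40}42`));  // false (SHA must be exactly 40 characters)
```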

View File

@ -0,0 +1,28 @@
{
"compilerOptions": {
"alwaysStrict": true,
"forceConsistentCasingInFileNames": true,
"inlineSourceMap": true,
"lib": [
"es2016"
],
"noImplicitAny": true,
"noImplicitReturns": true,
"noImplicitThis": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"outDir": "dist",
"pretty": true,
"rootDir": ".",
"skipLibCheck": true,
"strictNullChecks": true,
"target": "es5",
"typeRoots": [
"node_modules/@types"
]
},
"include": [
"lib/**/*",
"test/**/*"
]
}

View File

@ -0,0 +1,15 @@
{
"extends": "tslint:recommended",
"rules": {
"array-type": [true, "array"],
"arrow-parens": [true, "ban-single-arg-parens"],
"interface-name": [true, "never-prefix"],
"max-classes-per-file": [true, 4],
"no-consecutive-blank-lines": [true, 2],
"no-console": false,
"no-namespace": [true, "allow-declarations"],
"no-string-literal": false,
"quotemark": [true, "single"],
"variable-name": [true, "ban-keywords", "check-format", "allow-leading-underscore"]
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,4 @@
#!/bin/bash
set -e -o pipefail
node $AIO_SCRIPTS_JS_DIR/dist/lib/clean-up >> /var/log/aio/clean-up.log 2>&1

View File

@ -0,0 +1,43 @@
#!/bin/bash
set +e -o pipefail
# Variables
exitCode=0
# Helpers
function reportStatus {
local lastExitCode=$?
echo "$1: $([[ $lastExitCode -eq 0 ]] && echo OK || echo NOT OK)"
[[ $lastExitCode -eq 0 ]] || exitCode=1
}
# Check services
services=(
rsyslog
cron
nginx
pm2
)
for s in ${services[@]}; do
service $s status > /dev/null
reportStatus "Service '$s'"
done
# Check servers
origins=(
http://$AIO_UPLOAD_HOSTNAME:$AIO_UPLOAD_PORT
http://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTP
https://$AIO_NGINX_HOSTNAME:$AIO_NGINX_PORT_HTTPS
)
for o in ${origins[@]}; do
curl --fail --silent $o/health-check > /dev/null
reportStatus "Server '$o'"
done
# Exit
exit $exitCode

View File

@ -0,0 +1,15 @@
#!/bin/bash
set -e -o pipefail
exec >> /var/log/aio/init.log
exec 2>&1
# Start the services
echo [`date`] - Starting services...
service rsyslog start
service cron start
service dnsmasq start
service nginx start
service pm2 start
aio-upload-server-prod start
echo [`date`] - Services started successfully.

View File

@ -0,0 +1,10 @@
#!/bin/bash
set -e -o pipefail
# TODO(gkalpak): Ideally, the upload server should be run as a non-privileged user.
# (Currently, there doesn't seem to be a straightforward way.)
action=$([ "$1" == "stop" ] && echo "stop" || echo "start")
pm2 $action $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server \
--log /var/log/aio/upload-server-prod.log \
--name aio-upload-server-prod \
${@:2}

View File

@ -0,0 +1,23 @@
#!/bin/bash
set -e -o pipefail
# Set up env variables for testing
export AIO_BUILDS_DIR=$TEST_AIO_BUILDS_DIR
export AIO_GITHUB_TOKEN=$TEST_AIO_GITHUB_TOKEN
export AIO_REPO_SLUG=$TEST_AIO_REPO_SLUG
export AIO_UPLOAD_HOSTNAME=$TEST_AIO_UPLOAD_HOSTNAME
export AIO_UPLOAD_PORT=$TEST_AIO_UPLOAD_PORT
# Start the upload-server instance
# TODO(gkalpak): Ideally, the upload server should be run as a non-privileged user.
# (Currently, there doesn't seem to be a straightforward way.)
appName=aio-upload-server-test
if [[ "$1" == "stop" ]]; then
pm2 delete $appName
else
pm2 start $AIO_SCRIPTS_JS_DIR/dist/lib/upload-server \
--log /var/log/aio/upload-server-test.log \
--name $appName \
--no-autorestart \
${@:2}
fi

View File

@ -0,0 +1,39 @@
#!/bin/bash
set -e -o pipefail
logFile=/var/log/aio/verify-setup.log
exec 3>&1
exec >> $logFile
exec 2>&1
echo "[`date`] - Starting verification..."
# Helpers
function countdown {
message=$1
secs=$2
while [ $secs -gt 0 ]; do
echo -ne "$message in $secs...\033[0K\r"
sleep 1
: $((secs--))
done
echo -ne "\033[0K\r"
}
function onExit {
aio-upload-server-test stop
echo -e "Full logs in '$logFile'.\n" > /dev/fd/3
}
# Setup EXIT trap
trap 'onExit' EXIT
# Start an upload-server instance for testing
aio-upload-server-test start --log $logFile
# Give the upload-server some time to start :(
countdown "Starting" 5 > /dev/fd/3
# Run the tests
node $AIO_SCRIPTS_JS_DIR/dist/lib/verify-setup | tee /dev/fd/3

View File

@ -0,0 +1,35 @@
# VM Setup - Set up docker
## Install docker
_Debian (jessie):_
- `sudo apt-get update`
- `sudo apt-get install -y apt-transport-https ca-certificates curl git software-properties-common`
- `curl -fsSL https://apt.dockerproject.org/gpg | sudo apt-key add -`
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ debian-$(lsb_release -cs) main"`
- `sudo apt-get update`
- `sudo apt-get -y install docker-engine`
_Ubuntu (16.04):_
- `sudo apt-get update`
- `sudo apt-get install -y curl git linux-image-extra-$(uname -r) linux-image-extra-virtual`
- `sudo apt-get install -y apt-transport-https ca-certificates`
- `curl -fsSL https://yum.dockerproject.org/gpg | sudo apt-key add -`
- `apt-key fingerprint 58118E89F3A912897C070ADBF76221572C52609D`
- `sudo add-apt-repository "deb https://apt.dockerproject.org/repo/ ubuntu-$(lsb_release -cs) main"`
- `sudo apt-get update`
- `sudo apt-get -y install docker-engine`
## Start the docker service
- `sudo service docker start`
## Test docker
- `sudo docker run hello-world`
## Start docker on boot
- `sudo systemctl enable docker`

View File

@ -0,0 +1,17 @@
# VM setup - Attach persistent disk
## Create the `aio-builds` persistent disk (if it does not already exist)
- Follow instructions [here](https://cloud.google.com/compute/docs/disks/add-persistent-disk#create_disk).
- `sudo mkfs.ext4 -F -E lazy_itable_init=0,lazy_journal_init=0,discard /dev/disk/by-id/google-aio-builds`
## Mount disk
- `sudo mkdir -p /mnt/disks/aio-builds`
- `sudo mount -o discard,defaults /dev/disk/by-id/google-aio-builds /mnt/disks/aio-builds`
- `sudo chmod a+w /mnt/disks/aio-builds`
## Mount disk on boot
- ``echo UUID=`sudo blkid -s UUID -o value /dev/disk/by-id/google-aio-builds` \
/mnt/disks/aio-builds ext4 discard,defaults,nofail 0 2 | sudo tee -a /etc/fstab``

View File

@ -0,0 +1,29 @@
# VM Setup Instructions
- Set up docker
- Attach persistent disk
- Build docker image (+ checkout repo)
- Run image (+ setup for run on boot)
## Build image
- `<aio-builds-setup-dir>/build.sh [<name>[:<tag>]]`
## Run image
- `sudo docker run \
-d \
--dns 127.0.0.1 \
--name <instance-name> \
-p 80:80 \
-p 443:443 \
-v <host-snapshots-dir>:/var/www/aio-builds \
<name>[:<tag>]
`
## Questions
- Do we care to keep logs (e.g. cron, nginx, aio-upload-server, aio-clean-up, pm2) outside of the container?
- Currently, builds will only be removed when the PR is closed. It is possible to upload arbitrary builds (for non-existent commits) until then.
- Instead of creating new comments for each commit, update the original comment?
- Do we need a static IP address?
- Do we care about persistent disk automatic backup?

View File

@ -14,6 +14,7 @@
"lint": "yarn check-env && ng lint",
"pree2e": "webdriver-manager update --standalone false --gecko false",
"e2e": "yarn check-env && ng e2e --no-webdriver-update",
"deploy-preview": "scripts/deploy-preview.sh",
"deploy-staging": "firebase use staging --token \"$FIREBASE_TOKEN\" && yarn ~~deploy",
"pre~~deploy": "yarn build",
"~~deploy": "firebase deploy --message \"Commit: $TRAVIS_COMMIT\" --non-interactive --token \"$FIREBASE_TOKEN\"",

15
aio/scripts/deploy-preview.sh Executable file
View File

@ -0,0 +1,15 @@
#!/usr/bin/env bash
set -eux -o pipefail
INPUT_DIR=dist/
OUTPUT_FILE=/tmp/snapshot.tar.gz
AIO_BUILDS_HOST=https://ngbuilds.io
cd "`dirname $0`/.."
yarn run build -- --prod
tar --create --gzip --directory "$INPUT_DIR" --file "$OUTPUT_FILE" .
curl -iLX POST --header "Authorization: Token $NGBUILDS_IO_KEY" --data-binary "@$OUTPUT_FILE" \
"$AIO_BUILDS_HOST/create-build/$TRAVIS_PULL_REQUEST/$TRAVIS_COMMIT"
cd -

View File

@ -15,33 +15,53 @@ if [[ ${TRAVIS_TEST_RESULT=0} == 1 ]]; then
fi
# Don't deploy if not running against angular/angular
# TODO(i): because we don't let deploy to run outside of angular/angular folks can't use their
# private travis build to deploy anywhere. This is likely ok, but this means that @alexeagle's
# fancy setup to publish ES2015 packages to github -build repos no longer works. This is ok
# since with flat modules we'll have this feature built-in. We should still go and remove
# stuff that Alex put in for this from publish-build-artifacts.sh
if [[ ${TRAVIS_REPO_SLUG} != "angular/angular" || ${TRAVIS_PULL_REQUEST} != "false" ]]; then
echo "Skipping deploy to staging because this is a PR build."
if [[ ${TRAVIS_REPO_SLUG} != "angular/angular" ]]; then
echo "Skipping deploy because this is not angular/angular."
exit 0
fi
case ${CI_MODE} in
e2e)
# Don't deploy if this is a PR build
if [[ ${TRAVIS_PULL_REQUEST} != "false" ]]; then
echo "Skipping deploy because this is a PR build."
exit 0
fi
travisFoldStart "deploy.packages"
${thisDir}/publish-build-artifacts.sh
travisFoldEnd "deploy.packages"
;;
aio)
# Don't deploy if this build is not for master
if [[ ${TRAVIS_BRANCH} != "master" ]]; then
echo "Skipping deploy because this build is not for master."
exit 0
fi
travisFoldStart "deploy.aio"
(
cd ${TRAVIS_BUILD_DIR}/aio
if [[ $TRAVIS_PULL_REQUEST != "false" ]]; then
# This is a PR: deploy a snapshot for previewing
travisFoldStart "deploy.aio.pr-preview"
yarn run deploy-preview
travisFoldEnd "deploy.aio.pr-preview"
else
# This is upstream master: Deploy to staging
travisFoldStart "deploy.aio.staging"
yarn run deploy-staging
travisFoldEnd "deploy.aio.staging"
fi
)
travisFoldEnd "deploy.aio"
;;
esac