From 8be1300a0480a4418bf426b44866925747758652 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Tue, 28 Aug 2018 15:40:50 +0200 Subject: [PATCH 01/38] Update lodash dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d50086e..07a132a 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,7 @@ "bluebird": "3.4.6", "chalk": "1.1.3", "generator-phovea": "github:phovea/generator-phovea", - "lodash": "4.17.2", + "lodash": "4.17.10", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", From e02c4d5c1b01df50e9bf81716444621c2f1d8992 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Tue, 28 Aug 2018 15:41:17 +0200 Subject: [PATCH 02/38] Use phovea v2.0.0 --- phovea_product.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 1b047a7..0b140f4 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,7 +10,7 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "develop", + "branch": "v2.0.0", "additional": [ { "name": "taco_server", @@ -20,7 +20,7 @@ { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "develop" + "branch": "v2.0.0" } ], "data": [ From e97b732c3ff6d363747025bcf6d69bf3a9e5d187 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Mon, 19 Aug 2019 12:25:07 +0200 Subject: [PATCH 03/38] Switch to develop branches --- phovea_product.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 0b140f4..13adc7c 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -3,24 +3,24 @@ "type": "web", "label": "taco", "repo": "Caleydo/taco", - "branch": "master", + "branch": "develop", "additional": [] }, { "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "v2.0.0", + "branch": "develop", "additional": [ { "name": "taco_server", "repo": "Caleydo/taco_server", - "branch": "master" + "branch": "develop" }, { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "v2.0.0" + "branch": "develop" } ], "data": [ From 0e3e3a64965210d847670ff7e8517fb6c72c8c94 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Mon, 19 Aug 2019 12:42:11 +0200 Subject: [PATCH 04/38] Use python_3.7 branches --- phovea_product.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 13adc7c..51eaede 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,17 +10,17 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "develop", + "branch": "python_3.7", "additional": [ { "name": "taco_server", "repo": "Caleydo/taco_server", - "branch": "develop" + "branch": "python_3.7" }, { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "develop" + "branch": "python_3.7" } ], "data": [ From 4073c8e9632c1c837a9a7d11f174438494105511 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:00:00 +0100 Subject: [PATCH 05/38] use `destination` instead of removed `prefix` --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b4f22c4..95f3ba9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -59,7 +59,7 @@ jobs: node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo - store_artifacts: path: 
build - prefix: build + destination: build - deploy: name: cleanup untagged aws repo command: | From 403621286608578f7c393a5c3f65166b09a985b2 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:02:26 +0100 Subject: [PATCH 06/38] update node version --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 07a132a..d267be9 100644 --- a/package.json +++ b/package.json @@ -21,8 +21,8 @@ "build" ], "engines": { - "npm": ">= 3", - "node": ">= 6", + "npm": ">= 6.12", + "node": ">= 12.13", "iojs": ">= 3" }, "scripts": { From c3051ad06d5f30a9ca251aaf5005dfa7f160bfd4 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:02:44 +0100 Subject: [PATCH 07/38] update `node.js` in _.travis.yml_ --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f9b94b0..dd5dc36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ language: node_js node_js: -- 6 +- 12.13 services: - docker From 79f240b1eb41e4233f377227fd15aab6473d38c3 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:06:26 +0100 Subject: [PATCH 08/38] run `yo phovea:update` --- .circleci/config.yml | 51 ++- .yo-rc.json | 8 +- Jenkinsfile | 6 +- README.md | 4 +- build.js | 763 ++++++++++++++++++++++++++++--------- package.json | 6 +- phovea_product.schema.json | 173 +++++++++ 7 files changed, 826 insertions(+), 185 deletions(-) create mode 100644 phovea_product.schema.json diff --git a/.circleci/config.yml b/.circleci/config.yml index 95f3ba9..cca0c06 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -8,19 +8,32 @@ jobs: steps: - checkout - setup_remote_docker + - run: + name: Show Node.js and npm version + command: | + node -v + npm -v + - run: + name: Show Python and pip version + command: | + python --version + pip --version - restore_cache: key: dependency-cache-{{ checksum "package.json" }} - run: - name: install-npm-wee + name: Install npm dependencies command: npm install - save_cache: key: dependency-cache-{{ checksum "package.json" }} paths: - ./node_modules + - run: + name: Show installed npm dependencies + command: npm list --depth=1 || true - restore_cache: key: awscli-1.11.113 - run: - name: install-aws-cli + name: Install AWS CLI command: | virtualenv ~/venv . ~/venv/bin/activate @@ -30,7 +43,7 @@ paths: - ~/venv - run: - name: login ecr and docker hub + name: Login AWS ECR and DockerHub command: | . ~/venv/bin/activate cat > ~/.dockercfg << EOF @@ -44,7 +57,7 @@ login="$(aws ecr get-login --no-include-email)" ${login} - deploy: - name: build and deploy + name: Build and deploy command: | . ~/venv/bin/activate case $CIRCLE_BRANCH in @@ -61,7 +74,7 @@ path: build destination: build - deploy: - name: cleanup untagged aws repo + name: Cleanup untagged AWS repositories command: | . 
~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 @@ -69,7 +82,7 @@ jobs: # list repos filter to just the one of this product and delete untagged ones aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done - deploy: - name: restart aws #assumes the task definition is called - + name: Restart AWS task # assumes the task definition is called - command: | . ~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 @@ -89,3 +102,29 @@ jobs: fi aws --output text ecs run-task --cluster caleydo --task-definition ${awsFamily} --started-by CircleCIAutoUpdate fi +workflows: + version: 2 +# build-nightly: +# triggers: +# - schedule: +# cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.”, see: https://crontab.guru/#15_1_*_*_1-5 +# filters: +# branches: +# only: +# - develop +# jobs: +# - build + build-branch: + jobs: + - build: + filters: + tags: + ignore: /^v.*/ + build-tag: + jobs: + - build: + filters: + branches: + ignore: /.*/ + tags: + only: /^v.*/ diff --git a/.yo-rc.json b/.yo-rc.json index a268393..3f32617 100644 --- a/.yo-rc.json +++ b/.yo-rc.json @@ -4,6 +4,12 @@ "name": "taco_product", "author": "The Caleydo Team", "today": "Mon, 28 Nov 2016 13:28:08 GMT", - "githubAccount": "Caleydo" + "githubAccount": "Caleydo", + "promptValues": { + "authorName": "The Caleydo Team", + "authorEmail": "contact@caleydo.org", + "authorUrl": "", + "githubAccount": "Caleydo" + } } } \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index a2d194a..44e9742 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -21,9 +21,11 @@ node { try { withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=daily --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' + docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { + wrap([$class: 'Xvfb']) { + sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' } + } } } currentBuild.result = "SUCCESS" diff --git a/README.md b/README.md index 8568aa7..33b920a 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,13 @@ taco_product [![Phovea][phovea-image]][phovea-url] [![NPM version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Dependency Status][daviddm-image]][daviddm-url] ===================== -This is repository builds TACO, which is composed of the [taco_server](https://github.com/Caleydo/taco_server) and the [taco client](https://github.com/Caleydo/taco). -A simple way to install taco is using the [taco_product](https://github.com/Caleydo/taco_product). 
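The pieces named above are wired together in `phovea_product.json`; a trimmed sketch of its shape as it appears throughout these patches (branch values vary from patch to patch):

```
[
  {"type": "web", "label": "taco", "repo": "Caleydo/taco", "branch": "develop", "additional": []},
  {"type": "api", "label": "taco_server", "repo": "phovea/phovea_server", "branch": "develop",
   "additional": [
     {"name": "taco_server", "repo": "Caleydo/taco_server", "branch": "develop"},
     {"name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", "branch": "develop"}
   ]}
]
```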
Installation ------------ ``` -git clone https://github.com/Caleydo/taco_product.git +git clone git@github.com:Caleydo/taco_product.git cd taco_product npm install ``` diff --git a/build.js b/build.js index 66b036c..f6283d2 100644 --- a/build.js +++ b/build.js @@ -7,7 +7,9 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); +// see show help const argv = require('yargs-parser')(process.argv.slice(2)); + const quiet = argv.quiet !== undefined; const now = new Date(); @@ -15,32 +17,116 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); +const productName = pkg.name.replace('_product', ''); + +function showHelp(steps, chain) { + console.info(`node build.js -- step1 step2 +possible options: + * --quiet ... reduce log messages + * --serial ... build elements sequentially + * --skipTests ... skip tests + * --injectVersion ... injects the product version into the package.json of the built component + * --useSSH ... clone via ssh instead of https + * --skipCleanUp ... skip cleaning up old docker images + * --skipSaveImage ... skip saving the generated docker images + * --pushTo ... push docker images to the given registry + * --noDefaultTags ... don't push generated default tag : + * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop + * --forceLabel ... force to use the label even only a single service exists + * --dryRun ... just compute chain no execution + * --help ... show this help message + +arguments: (starting with --!) optional list of steps to execute in the given order (expert mode) by default the default chain is executed + `); + + steps = Object.keys(steps); + const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); + const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); + + console.info('possible primary steps:\n ', primary.join('\n ')); + console.info('possible secondary steps:\n ', secondary.join('\n ')); + + console.info('default chain:\n', JSON.stringify(chain, null, ' ')); +} +/** + * generates a repo url to clone depending on the argv.useSSH option + * @param {string} url the repo url either in git@ for https:// form + * @returns the clean repo url + */ function toRepoUrl(url) { + if (url.startsWith('git@')) { + if (argv.useSSH) { + return url; + } + // have an ssh url need an http url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `https://${m[3]}/${m[4]}.git`; + } + if (url.startsWith('http')) { + if (!argv.useSSH) { + return url; + } + // have a http url need an ssh url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `git@${m[2]}:${m[4]}.git`; + } + if (!url.includes('/')) { + url = `Caleydo/${url}`; + } if (argv.useSSH) { - return `git@github.com:${url}.git` + return `git@github.com:${url}.git`; } - return url.startsWith('https://github.com/') ? 
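/* The rewritten toRepoUrl() above normalizes between SSH and HTTPS remotes
 * with a single regex. A small sketch of the conversion, assuming the same
 * pattern as in the patch:
 *
 *   const m = 'git@github.com:phovea/phovea_server.git'
 *     .match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/);
 *   // m[3] === 'github.com', m[4] === 'phovea/phovea_server'
 *   const httpsUrl = `https://${m[3]}/${m[4]}.git`;
 */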
url : `https://github.com/${url}.git`; + return `https://github.com/${url}.git`; } +/** + * guesses the credentials environment variable based on the given repository hostname + * @param {string} repo + */ +function guessUserName(repo) { + // extract the host + const host = repo.match(/:\/\/([^/]+)/)[1]; + const hostClean = host.replace(/\./g, '_').toUpperCase(); + // e.g. GITHUB_COM_CREDENTIALS + const envVar = process.env[`${hostClean}_CREDENTIALS`]; + if (envVar) { + return envVar; + } + return process.env.PHOVEA_GITHUB_CREDENTIALS; +} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; - if (repo.includes('git@github.com') || !username_and_password) { + if (repo.startsWith('git@')) { // ssh return repo; } - return repo.replace('://', `://${username_and_password}@`); + const usernameAndPassword = guessUserName(repo); + if (!usernameAndPassword) { // ssh or no user given + return repo; + } + return repo.replace('://', `://${usernameAndPassword}@`); } - function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/(.*)\.git/)[0] + return url.match(/\/([^/]+)\.git/)[0]; } return url.slice(url.lastIndexOf('/') + 1); } +/** + * deep merge with array union + * @param {*} target + * @param {*} source + */ +function mergeWith(target, source) { + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; + _.mergeWith(target, source, mergeArrayUnion); + return target; +} + function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -49,7 +135,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - const request = http.get(url, (response) => { + http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -72,11 +158,13 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - switch(desc.type) { - case 'url': - const destName = toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); - case 'repo': + desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); + switch (desc.type) { + case 'url': { + desc.name = desc.name || toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); + } + case 'repo': { desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -85,6 +173,7 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); + } default: console.error('unknown data type:', desc.type); return null; @@ -96,25 +185,36 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options + * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); + const p = spawn(cmd, typeof args === 'string' ? 
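/* spawn() wraps child_process.spawn in a promise that resolves with the exit
 * code and rejects on a non-zero status; with --quiet the output is captured
 * and only replayed on failure. A stripped-down sketch of the same pattern:
 *
 *   const run = (cmd, args) => new Promise((resolve, reject) => {
 *     require('child_process').spawn(cmd, args, {stdio: 'inherit'})
 *       .on('close', (code) => code === 0
 *         ? resolve(code)
 *         : reject(new Error(`${cmd} failed with status code ${code}`)));
 *   });
 */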
args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); + const out = []; + if (p.stdout) { + p.stdout.on('data', (chunk) => out.push(chunk)); + } + if (p.stderr) { + p.stderr.on('data', (chunk) => out.push(chunk)); + } p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - reject(`${cmd} failed with status code ${code} ${signal}`); + if (args.quiet) { + // log output what has been captured + console.log(out.join('\n')); + } + reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); } }); }); } - /** * run npm with the given args * @param cwd working directory @@ -151,23 +251,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code == 0 ? resolve() : reject(code)); + p2.on('close', (code) => code === 0 ? resolve() : reject(code)); }); } -function dockerRemoveImages(productName) { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); +function dockerRemoveImages() { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code == 0) { + if (code === 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -190,17 +290,20 @@ function createQuietTerminalAdapter() { * runs yo internally * @param generator * @param options - * @param cwd + * @param {string} cwd + * @param {string[]|string} args */ -function yo(generator, options, cwd) { +function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); + const _args = Array.isArray(args) ? 
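/* yo() drives a yeoman generator in-process instead of shelling out to the
 * yo CLI. The essential calls, as used in this patch (lookup() discovers the
 * installed generators, run() executes one of them):
 *
 *   const yeomanEnv = require('yeoman-environment').createEnv([], {cwd, env});
 *   yeomanEnv.lookup(() => yeomanEnv.run('phovea:workspace', {noAdditionals: true}, resolve));
 */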
args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.run('phovea:' + generator, options, resolve); + yeomanEnv.lookup(() => { + yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + }); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -213,44 +316,41 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; - console.log(cwd, chalk.blue(`running git clone --depth 1 -b ${p.branch} ${toRepoUrl(p.repo)} ${p.name}`)); - return spawn('git', ['clone', '--depth', '1', '-b', p.branch, toRepoUrlWithUser(p.repo), p.name], {cwd}); + + return yo('clone-repo', { + branch: p.branch, + extras: '--depth 1', + dir: p.name, + cwd + }, cwd, toRepoUrlWithUser(p.repo)); // pass repo url as argument } function resolvePluginType(p, dir) { + if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { + p.pluginType = 'lib'; + p.isHybridType = false; + return; + } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } -function preBuild(p, dir) { - const hasAdditional = p.additional.length > 0; - let act = fs.emptyDirAsync(dir) - .then(() => cloneRepo(p, dir)) - .then(() => resolvePluginType(p, dir)); - if (hasAdditional) { - act = act - .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); - } - return act; -} - function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); - } else { - return Promise.resolve({}); } + return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -266,104 +366,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? 
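/* dockerSave() (defined above) streams `docker save` straight through gzip so
 * the image tarball is never buffered in memory. The same plumbing in plain
 * Node.js, as a sketch (image and target are placeholders):
 *
 *   const {spawn} = require('child_process');
 *   const save = spawn('docker', ['save', image]);
 *   const gzip = spawn('gzip', ['-c']);
 *   save.stdout.pipe(gzip.stdin);
 *   gzip.stdout.pipe(require('fs').createWriteStream(target));
 */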
null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? ':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} - -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? 
':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -380,6 +455,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -396,7 +488,7 @@ function 
pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -409,6 +501,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? '' : `/${d.label}`}:${pkg.version}`; + // incorporate patch file + if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { + // use a different base image to build the item + d.baseImage = dockerComposePatch.services[d.label].image; + delete dockerComposePatch.services[d.label].image; + } + // include hint in the tmp directory which one is it + d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; + }); + + return descs; +} + +function asChain(steps, chain) { + if (chain.length === 0) { + return []; + } + const possibleSteps = Object.keys(steps); + + const callable = (c) => { + if (typeof c === 'function') { + return c; + } + + if (typeof c === 'string') { + // simple lookup + if (!possibleSteps.includes(c)) { + console.error('invalid step:', c); + throw new Error('invalid step: ' + c); + } + return callable(steps[c]); + } + + if (Array.isArray(c)) { // sequential sub started + const sub = c.map(callable); + return () => { + console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); + let step = Promise.resolve(); + for (const s of sub) { + step = step.then(s); + } + return step; + }; + } + // parallel = object + const sub = Object.keys(c).map((ci) => callable(c[ci])); + return () => { + console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); + return Promise.all(sub.map((d) => d())); // run sub lazy combined with all + }; + }; + return chain.map(callable); +} + +function runChain(chain, catchErrors) { + let start = null; + let step = new Promise((resolve) => { + start = resolve; + }); + + for (const c of chain) { + step = step.then(c); + } + + step.catch(catchErrors); + + return () => { + start(); // resolve first to start chain + return step; // return last result + }; +} + +function strObject(items) { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; +} + +function buildDockerImage(p) { + const buildInSubDir = p.type === 'web' || p.type === 'static'; + let buildArgs = ''; + // pass through http_proxy, no_proxy, and https_proxy env variables + for (const key of 
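/* asChain()/runChain() above compile the declarative build chain into
 * promises: a string is looked up in the `steps` table, an array runs its
 * entries sequentially, and a plain object runs its values in parallel via
 * Promise.all. For example, a (hypothetical) chain
 *
 *   ['clean', {web: 'product:web', api: 'product:api'}, 'compose']
 *
 * runs clean first, then both products concurrently, then compose.
 */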
Object.keys(process.env)) { + const lkey = key.toLowerCase(); + if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { + // pass through + buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; + } + } + + // patch the docker file with the with an optional given baseImage + return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) + // create the container image + .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) + // tag the container image + .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); +} + +function createWorkspace(p) { + return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) + .then(() => patchWorkspace(p)); +} + +function installWebDependencies(p) { + return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); +} + +function cleanUpWebDependencies(p) { + return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); +} + +function resolvePluginTypes(p) { + if (p.pluginType) { + return Promise.resolve(); // already resolved + } + if (p.additional.length === 0) { + return resolvePluginType(p, p.tmpDir); + } + return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); +} + +function testWebAdditionals(p) { + return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); +} + +function buildWeb(p) { + const hasAdditional = p.additional.length > 0; + + let step; + if (hasAdditional) { + step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); + } else { + step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); + } + // move to target directory + return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); +} + +function installPythonTestDependencies(p) { + console.log(chalk.yellow('create test environment')); + return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); +} + +function buildServer(p) { + let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); + for (const pi of p.additional) { + act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? 
':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -416,59 +716,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => 
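/* catchProductBuild() above records a failure on the product descriptor
 * (p.error) instead of rejecting the whole chain, so the summary step below
 * can still report SUCCESS/ERROR per product and exit non-zero at the end. */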
pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? 
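/* Meta steps: each name ('clone', 'install', 'build', ...) expands to all
 * matching 'name:<product>' steps, as an array (sequential) when --serial is
 * set and as an object (parallel) otherwise. Steps can also be invoked
 * explicitly, e.g. (product suffixes depend on phovea_product.json):
 *
 *   node build.js -- clone:taco install:taco build:taco image:taco
 */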
sub : strObject(sub); + } + } + + const chain = ['clean']; + + if (!argv.skipCleanUp) { + chain.push('prune'); + } + + if (argv.serial) { + chain.push(...chainProducts); // serially + } else { + const par = {}; + chainProducts.forEach((c) => { + par[c.name] = c; + }); + chain.push(par); // as object = parallel + } + // result of the promise is an array of partial docker compose files + + chain.push('compose'); + if (argv.pushTo) { + chain.push('push'); + } + chain.push('summary'); + + // XX. catch all error handling + const catchErrors = (error) => { console.error('ERROR extra building ', error); + // rename back + cleanUp(); process.exit(1); - }); + }; + + if (argv.help) { + showHelp(steps, chain); + cleanUp(); + process.exit(0); + } + + if (argv._.length > 0) { + // explicit chain replace computed one + chain.splice(0, chain.length, ...argv._); + } + + console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); + const toExecute = asChain(steps, chain); + const launch = runChain(toExecute, catchErrors); + if (!argv.dryRun) { + launch(); + } } diff --git a/package.json b/package.json index d267be9..a8609f9 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/Caleydo/taco_product.git" + "url": "git@github.com:Caleydo/taco_product.git" }, "files": [ "phovea_product.js", @@ -33,8 +33,8 @@ "dependencies": { "bluebird": "3.4.6", "chalk": "1.1.3", - "generator-phovea": "github:phovea/generator-phovea", - "lodash": "4.17.10", + "generator-phovea": "^2.0.0", + "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", diff --git a/phovea_product.schema.json b/phovea_product.schema.json new file mode 100644 index 0000000..3ed1003 --- /dev/null +++ b/phovea_product.schema.json @@ -0,0 +1,173 @@ +{ + "$id": "phovea_product", + "type": "array", + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "items": { + "$id": "phovea_product/items", + "type": "object", + "required": [ + "type", + "repo" + ], + "properties": { + "type": { + "$id": "phovea_product/items/properties/type", + "type": "string", + "title": "the type of product to build", + "default": "", + "examples": [ + "web", + "service", + "api" + ] + }, + "name": { + "$id": "phovea_product/items/properties/name", + "type": "string", + "title": "name of the repo", + "default": "guesses from the repository", + "examples": [ + "ordino", + "server" + ] + }, + "label": { + "$id": "phovea_product/items/properties/label", + "type": "string", + "title": "product label and docker image label", + "default": "=name", + "examples": [ + "ordino", + "server" + ] + }, + "repo": { + "$id": "phovea_product/items/properties/repo", + "type": "string", + "title": "repository to use", + "description": "either a full git url or in the form /", + "default": "", + "examples": [ + "Caleydo/ordino" + ] + }, + "symlink": { + "$id": "phovea_product/items/properties/symlink", + "type": "string", + "title": "location relative to the product directory which contains the clone repository", + "description": "Note symbolic links will be created to ensure the proper structure", + "default": "", + "examples": [ + "../myclone" + ] + }, + "branch": { + "$id": "phovea_product/items/properties/branch", + "type": "string", + "title": "the branch, tag, or sha1 commit to use", + "default": "master", + "examples": [ + "master", + "v1.2.3", + "dc7486a472a987a2f6a38cd18b9b069487f1a4c8", + "develop" + ] + }, + "additional": { + "$id": 
"phovea_product/items/properties/additional", + "type": "array", + "description": "additional repositories that should be included in the build", + "items": { + "$id": "phovea_product/items/properties/additional/items", + "type": "object", + "required": [ + "nane", + "repo" + ], + "properties": { + "name": { + "$id": "phovea_product/items/properties/additional/items/properties/name", + "type": "string", + "title": "name of the repo", + "default": "", + "examples": [ + "phovea_core" + ] + }, + "repo": { + "$id": "phovea_product/items/properties/additional/items/properties/repo", + "type": "string", + "title": "repository to use", + "description": "either a full git url or in the form /", + "default": "", + "examples": [ + "Caleydo/ordino" + ] + }, + "symlink": { + "$id": "phovea_product/items/properties/symlink", + "type": "string", + "title": "location relative to the product directory which contains the clone repository", + "description": "Note symbolic links will be created to ensure the proper structure", + "default": "", + "examples": [ + "../myclone" + ] + }, + "branch": { + "$id": "phovea_product/items/properties/additional/items/properties/branch", + "type": "string", + "title": "the branch, tag, or sha1 commit to use", + "default": "master", + "examples": [ + "master", + "v1.2.3", + "dc7486a472a987a2f6a38cd18b9b069487f1a4c8", + "develop" + ] + } + } + } + }, + "data": { + "$id": "phovea_product/items/properties/data", + "type": "array", + "description": "a list of data files that should be included in /phovea/_data of the server build", + "items": { + "$id": "phovea_product/items/properties/data/items", + "type": "object", + "properties": { + "name": { + "$id": "phovea_product/items/properties/data/items/properties/name", + "type": "string", + "title": "name to store the file as", + "default": "derived from url or repo" + }, + "url": { + "$id": "phovea_product/items/properties/data/items/properties/url", + "type": "string", + "title": "url to a file to download", + "description": "if the file doesn't start with http... 
it is assumed that the file is relative to https://s3.eu-central-1.amazonaws.com/phovea-data-packages/", + "default": "", + "examples": [ + "test.h5", + "https://test.com/test.txt" + ] + }, + "repo": { + "$id": "phovea_product/items/properties/data/items/properties/repo", + "type": "string", + "title": "repository to clone that contains a data directory, the /data directory is cloned to /phovea/_data/", + "description": "either a full git url or in the form /", + "default": "", + "examples": [ + "Caleydo/ordino" + ] + } + } + } + } + } + } +} From b4778c61404d00e8a1984e07578e73fef34e2a1c Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:10:14 +0100 Subject: [PATCH 09/38] use `circleci/python:3.7-buster-node-browsers` as Docker image --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index cca0c06..80cc25d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,7 +3,7 @@ jobs: build: working_directory: ~/phovea docker: - - image: caleydo/phovea_circleci_python:v1.0 + - image: circleci/python:3.7-buster-node-browsers # for node version see Dockerfile on https://hub.docker.com/r/circleci/python - image: docker:17.05.0-ce-git steps: - checkout From a5b55caa8b24e330c8850120b224e2472da348f9 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:12:35 +0100 Subject: [PATCH 10/38] use develop branches --- phovea_product.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 51eaede..13adc7c 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,17 +10,17 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "python_3.7", + "branch": "develop", "additional": [ { "name": "taco_server", "repo": "Caleydo/taco_server", - "branch": "python_3.7" + "branch": "develop" }, { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "python_3.7" + "branch": "develop" } ], "data": [ From 3afea507f34bac6ce462c1370c45535fca5540d4 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:16:29 +0100 Subject: [PATCH 11/38] update `awscli` version --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 80cc25d..2df9084 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,15 +31,15 @@ jobs: name: Show installed npm dependencies command: npm list --depth=1 || true - restore_cache: - key: awscli-1.11.113 + key: awscli-1.16.313 - run: name: Install AWS CLI command: | virtualenv ~/venv . ~/venv/bin/activate - pip install awscli==1.11.113 + pip install awscli==1.16.313 - save_cache: - key: awscli-1.11.113 + key: awscli-1.16.313 paths: - ~/venv - run: From a9c5355f205bad4b0fdaa68d6194bbb8fdd532f7 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:19:05 +0100 Subject: [PATCH 12/38] revert `awscli` version --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2df9084..80cc25d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,15 +31,15 @@ jobs: name: Show installed npm dependencies command: npm list --depth=1 || true - restore_cache: - key: awscli-1.16.313 + key: awscli-1.11.113 - run: name: Install AWS CLI command: | virtualenv ~/venv . 
~/venv/bin/activate - pip install awscli==1.16.313 + pip install awscli==1.11.113 - save_cache: - key: awscli-1.16.313 + key: awscli-1.11.113 paths: - ~/venv - run: From 7e9faf9a32af2d8e624d010f4dee249f155a617b Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Thu, 9 Jan 2020 11:20:03 +0100 Subject: [PATCH 13/38] use python 2 circleci Docker image --- .circleci/config.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 80cc25d..a0e689e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,8 +3,7 @@ jobs: build: working_directory: ~/phovea docker: - - image: circleci/python:3.7-buster-node-browsers # for node version see Dockerfile on https://hub.docker.com/r/circleci/python - - image: docker:17.05.0-ce-git + - image: caleydo/phovea_circleci_python:v3.0 steps: - checkout - setup_remote_docker From b2cfea94e2d20c98a044bb9e1ee165b183deb23c Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:06:49 +0100 Subject: [PATCH 14/38] Delete .travis.yml --- .travis.yml | 36 ------------------------------------ 1 file changed, 36 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index dd5dc36..0000000 --- a/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -language: node_js - -node_js: -- 12.13 - -services: -- docker - -before_install: -- export DISPLAY=:99.0 -- sh -e /etc/init.d/xvfb start -- if [[ `npm -v` != 3* ]]; then npm i -g npm@3; fi - -before_script: -- pip install --user awscli -- export PATH=$PATH:$HOME/.local/bin - -# build and push -#- $(aws ecr get-login --region eu-central-1) -#script: node build.js --skipTests --skipSaveImage --pushTo=$AWS_ECR_PREFIX --pushLatest --pushDaily - -script: node build.js --skipSaveImage - -deploy: - provider: releases - api_key: - secure: TK9/P34Bi3WuppiDrBCwVcn41yCBwmILaU8hXTBzUPbT7TbeFIwsC6/4CtH85Z+ZrUve4S5pTmWRNf2dQDxWw3uYu7+bJuemV2J1LHG76mognj+TNEiYxfLQUt3Gql4W7C7FcI4Rlx5/uMN9wY1wro8TWUBMwT6jjSrUWIvK3GXoojd5bHvJx07XpjWl9wCon4D0ruZiFoM2mdeP23lbc2GckETi32oEKswnQXxkMACmxbPzoWbvkxH4aK8Bt2Rj2sl2TbPhVkN6DAkHGkGAvLI+2/aRfG27+oo3OKsaDjbuGABct8TfZccJ970CbQ8kbnCjYxstvqkg1JWjF0W67sX/flBZZOEUA5l0OLWo6HqMGMxm7/lEQhIdPMsRmvXL+HVOxkMrB2dda58QzxVwiZp+rRqUaeabPZp8Kl5xodGrVxsBvxe6zAbJ5jCtCSumG6+kLyKI00/kYlghqQNrgUw0ZsYJlQ34h3lo/24QpaeyDpQoCkGWQgtgqiXGpeKSu7bCnOqIqAy3nbT9Utwj7K8gIasTG5idosEAz/THMampNbGDuyxxc340sYGNMg9Bhm1g2ILWRdtV470p5hwBtIDTKi3/PAizEO26+Wh0zI47Sg3ao57avcbCsTmzbZUeA5J4bojmchhJCHX8su9cSCGh/2fJA/1eBIgEvOQ8LNE= - file: build/* - on: - tags: true - -notifications: - slack: - secure: E8/1UIdHSczUbN+6i6gd1d5LM4vmLdwLQ30tpyjvnM0wvfDce76oPxLJAy240WJ5ybXRZUtNrttpVpt4tEXCy8aLFCmxD7s77rVloH+q1J8R/ptTFWZGhFGEujk1awEmVbzcWxJkV9/JENQaeGBKxwv8/EQwWwEkAb7p/+AJb9owmH88b3wUZUGHBWtbMiyyaF4Rm1Wg1stJB8Z1Ga7PRF4cqufTgcDdsCPVv9gAY+VxOIGqX/Vfuc9UWpUH8vq8lHUE7Inn5QS78kuFfSgLWga3H6Mu/Gko1XNlWk0QWWQBUvEZ6ZC6Wuo68KzvUjJHDTnx8WyfHue2JNHIslcX+eJq2WHLeEgM24VeNkILCGo/H/60NGHiSjrIv/Y9h6bQ9FDjo6TUyE4nbdPYN1RN9FQ5UbI9Y4Gi753H9mqnHWlEywBOzHxdZCAuz9Wh03CCF/blsvJ+Obbyo6Jrfe+g44jyi9kQdBNQ78qG6v4EXws8FiYao6x3PpgIwFix42Cpr+soAh5FpA3C1zHSAyZZpXF65/lrDl5yPNofK7Wy0B9bw+0I6Z/u7ZKFNVZXvYPGYvtUVcsALGBdmYc61+LCta36Po0KZseWVAlJj6QnOJDYzv0wvV/zsuf9A5KpYFGiqV9Q7zmtiO5FYF5sBy+lE7O9tHVO4O18IRndhRQgxhs= - on_success: change - on_failure: always From 78d270e49b23ca83dc008943ba731e1dc1a2e9fa Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:08:03 +0100 Subject: [PATCH 15/38] Update README.md --- README.md | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 33b920a..177385f 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ taco_product [![Phovea][phovea-image]][phovea-url] [![NPM version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Dependency Status][daviddm-image]][daviddm-url] ===================== +This repository builds TACO, which is composed of the [taco_server](https://github.com/Caleydo/taco_server) and the [taco client](https://github.com/Caleydo/taco). - +A simple way to install TACO is to use the [taco_product](https://github.com/Caleydo/taco_product). Installation ------------ From 7e29871f9e4d247a8c6095f2a5a0622947929af2 Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:12:20 +0100 Subject: [PATCH 16/38] Update Jenkinsfile --- Jenkinsfile | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 44e9742..a2d194a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -21,11 +21,9 @@ node { try { withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' + wrap([$class: 'Xvfb']) { + sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=daily --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' } - } } } currentBuild.result = "SUCCESS" From 04fd6baf23605191bdf8f7e7d71b6b03723f6dc9 Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:13:38 +0100 Subject: [PATCH 17/38] Delete phovea_product.schema.json --- phovea_product.schema.json | 173 ------------------------------------ 1 file changed, 173 deletions(-) delete mode 100644 phovea_product.schema.json diff --git a/phovea_product.schema.json b/phovea_product.schema.json deleted file mode 100644 index 3ed1003..0000000 --- a/phovea_product.schema.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "$id": "phovea_product", - "type": "array", - "definitions": {}, - "$schema": "http://json-schema.org/draft-07/schema#", - "items": { - "$id": "phovea_product/items", - "type": "object", - "required": [ - "type", - "repo" - ], - "properties": { - "type": { - "$id": "phovea_product/items/properties/type", - "type": "string", - "title": "the type of product to build", - "default": "", - "examples": [ - "web", - "service", - "api" - ] - }, - "name": { - "$id": "phovea_product/items/properties/name", - "type": "string", - "title": "name of the repo", - "default": "guesses from the repository", - "examples": [ - "ordino", - "server" - ] - }, - "label": { - "$id": "phovea_product/items/properties/label", - "type": "string", - "title": "product label and docker image label", - "default": "=name", - "examples": [ - "ordino", - "server" - ] - }, - "repo": { - "$id": "phovea_product/items/properties/repo", - "type": "string", - "title": "repository to use", - "description": "either a full git url or in the form /", - "default": "", - "examples": [ - "Caleydo/ordino" - ] - }, - "symlink": { - "$id": "phovea_product/items/properties/symlink", - "type": "string", - "title": "location relative to the product directory which contains the clone repository", - "description": "Note symbolic links 
will be created to ensure the proper structure", - "default": "", - "examples": [ - "../myclone" - ] - }, - "branch": { - "$id": "phovea_product/items/properties/branch", - "type": "string", - "title": "the branch, tag, or sha1 commit to use", - "default": "master", - "examples": [ - "master", - "v1.2.3", - "dc7486a472a987a2f6a38cd18b9b069487f1a4c8", - "develop" - ] - }, - "additional": { - "$id": "phovea_product/items/properties/additional", - "type": "array", - "description": "additional repositories that should be included in the build", - "items": { - "$id": "phovea_product/items/properties/additional/items", - "type": "object", - "required": [ - "nane", - "repo" - ], - "properties": { - "name": { - "$id": "phovea_product/items/properties/additional/items/properties/name", - "type": "string", - "title": "name of the repo", - "default": "", - "examples": [ - "phovea_core" - ] - }, - "repo": { - "$id": "phovea_product/items/properties/additional/items/properties/repo", - "type": "string", - "title": "repository to use", - "description": "either a full git url or in the form <owner>/<repository>", - "default": "", - "examples": [ - "Caleydo/ordino" - ] - }, - "symlink": { - "$id": "phovea_product/items/properties/symlink", - "type": "string", - "title": "location relative to the product directory which contains the clone repository", - "description": "Note symbolic links will be created to ensure the proper structure", - "default": "", - "examples": [ - "../myclone" - ] - }, - "branch": { - "$id": "phovea_product/items/properties/additional/items/properties/branch", - "type": "string", - "title": "the branch, tag, or sha1 commit to use", - "default": "master", - "examples": [ - "master", - "v1.2.3", - "dc7486a472a987a2f6a38cd18b9b069487f1a4c8", - "develop" - ] - } - } - } - }, - "data": { - "$id": "phovea_product/items/properties/data", - "type": "array", - "description": "a list of data files that should be included in /phovea/_data of the server build", - "items": { - "$id": "phovea_product/items/properties/data/items", - "type": "object", - "properties": { - "name": { - "$id": "phovea_product/items/properties/data/items/properties/name", - "type": "string", - "title": "name to store the file as", - "default": "derived from url or repo" - }, - "url": { - "$id": "phovea_product/items/properties/data/items/properties/url", - "type": "string", - "title": "url to a file to download", - "description": "if the file doesn't start with http... 
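
For orientation, a minimal phovea_product.json that this schema is intended to validate could look as follows; this is a hypothetical example assembled from the examples the schema itself lists (repository names and branches are placeholders):

    [
      {
        "type": "web",
        "repo": "Caleydo/ordino",
        "branch": "develop",
        "additional": [
          {
            "name": "phovea_core",
            "repo": "phovea/phovea_core",
            "branch": "develop"
          }
        ]
      },
      {
        "type": "api",
        "repo": "phovea/phovea_server",
        "branch": "master",
        "data": [
          {
            "url": "test.h5"
          }
        ]
      }
    ]
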
it is assumed that the file is relative to https://s3.eu-central-1.amazonaws.com/phovea-data-packages/", - "default": "", - "examples": [ - "test.h5", - "https://test.com/test.txt" - ] - }, - "repo": { - "$id": "phovea_product/items/properties/data/items/properties/repo", - "type": "string", - "title": "repository to clone that contains a data directory, the /data directory is cloned to /phovea/_data/", - "description": "either a full git url or in the form <owner>/<repository>", - "default": "", - "examples": [ - "Caleydo/ordino" - ] - } - } - } - } - } - } -} From b20671680f6016f791ebe22e360eeb92abc93ff9 Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:14:44 +0100 Subject: [PATCH 18/38] Update build.js --- build.js | 763 +++++++++++++------------------------------------ 1 file changed, 170 insertions(+), 593 deletions(-) diff --git a/build.js b/build.js index f6283d2..66b036c 100644 --- a/build.js +++ b/build.js @@ -7,9 +7,7 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); -// see show help const argv = require('yargs-parser')(process.argv.slice(2)); - const quiet = argv.quiet !== undefined; const now = new Date(); @@ -17,116 +15,32 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); -const productName = pkg.name.replace('_product', ''); - -function showHelp(steps, chain) { - console.info(`node build.js -- step1 step2 -possible options: - * --quiet ... reduce log messages - * --serial ... build elements sequentially - * --skipTests ... skip tests - * --injectVersion ... injects the product version into the package.json of the built component - * --useSSH ... clone via ssh instead of https - * --skipCleanUp ... skip cleaning up old docker images - * --skipSaveImage ... skip saving the generated docker images - * --pushTo ... push docker images to the given registry - * --noDefaultTags ... don't push generated default tag <name>:<version> - * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop - * --forceLabel ... force to use the label even only a single service exists - * --dryRun ... just compute chain no execution - * --help ... show this help message - -arguments: (starting with --!) 
optional list of steps to execute in the given order (expert mode) by default the default chain is executed - `); - - steps = Object.keys(steps); - const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); - const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); - - console.info('possible primary steps:\n ', primary.join('\n ')); - console.info('possible secondary steps:\n ', secondary.join('\n ')); - - console.info('default chain:\n', JSON.stringify(chain, null, ' ')); -} -/** - * generates a repo url to clone depending on the argv.useSSH option - * @param {string} url the repo url either in git@ for https:// form - * @returns the clean repo url - */ function toRepoUrl(url) { - if (url.startsWith('git@')) { - if (argv.useSSH) { - return url; - } - // have an ssh url need an http url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `https://${m[3]}/${m[4]}.git`; - } - if (url.startsWith('http')) { - if (!argv.useSSH) { - return url; - } - // have a http url need an ssh url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `git@${m[2]}:${m[4]}.git`; - } - if (!url.includes('/')) { - url = `Caleydo/${url}`; - } if (argv.useSSH) { - return `git@github.com:${url}.git`; + return `git@github.com:${url}.git` } - return `https://github.com/${url}.git`; + return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`; } -/** - * guesses the credentials environment variable based on the given repository hostname - * @param {string} repo - */ -function guessUserName(repo) { - // extract the host - const host = repo.match(/:\/\/([^/]+)/)[1]; - const hostClean = host.replace(/\./g, '_').toUpperCase(); - // e.g. GITHUB_COM_CREDENTIALS - const envVar = process.env[`${hostClean}_CREDENTIALS`]; - if (envVar) { - return envVar; - } - return process.env.PHOVEA_GITHUB_CREDENTIALS; -} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - if (repo.startsWith('git@')) { // ssh + const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; + if (repo.includes('git@github.com') || !username_and_password) { return repo; } - const usernameAndPassword = guessUserName(repo); - if (!usernameAndPassword) { // ssh or no user given - return repo; - } - return repo.replace('://', `://${usernameAndPassword}@`); + return repo.replace('://', `://${username_and_password}@`); } + function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/([^/]+)\.git/)[0]; + return url.match(/\/(.*)\.git/)[0] } return url.slice(url.lastIndexOf('/') + 1); } -/** - * deep merge with array union - * @param {*} target - * @param {*} source - */ -function mergeWith(target, source) { - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
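
For reference, the array-union merge that this script applies to docker-compose partials reduces to a lodash mergeWith customizer; a standalone sketch (assuming lodash 4.x is installed):

    const _ = require('lodash');

    // arrays are merged as set unions; returning undefined defers to the default merge
    const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined;

    const target = {services: {web: {ports: ['80:80']}}};
    const source = {services: {web: {ports: ['80:80', '443:443']}}};
    _.mergeWith(target, source, mergeArrayUnion);
    console.log(target.services.web.ports); // ['80:80', '443:443'] (no duplicates)
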
_.union(a, b) : undefined; - _.mergeWith(target, source, mergeArrayUnion); - return target; -} - function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -135,7 +49,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - http.get(url, (response) => { + const request = http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -158,13 +72,11 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); - switch (desc.type) { - case 'url': { - desc.name = desc.name || toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); - } - case 'repo': { + switch(desc.type) { + case 'url': + const destName = toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); + case 'repo': desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -173,7 +85,6 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); - } default: console.error('unknown data type:', desc.type); return null; @@ -185,36 +96,25 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options - * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); - const out = []; - if (p.stdout) { - p.stdout.on('data', (chunk) => out.push(chunk)); - } - if (p.stderr) { - p.stderr.on('data', (chunk) => out.push(chunk)); - } + const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - if (args.quiet) { - // log output what has been captured - console.log(out.join('\n')); - } - reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); + reject(`${cmd} failed with status code ${code} ${signal}`); } }); }); } + /** * run npm with the given args * @param cwd working directory @@ -251,23 +151,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code === 0 ? resolve() : reject(code)); + p2.on('close', (code) => code == 0 ? 
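
The dockerSave in this hunk pipes `docker save` through gzip and settles the promise on the gzip exit code; a self-contained sketch of the same pipeline (assumes docker and gzip are on the PATH; error handling trimmed):

    const {spawn} = require('child_process');
    const fs = require('fs');

    function saveImage(image, target) {
      return new Promise((resolve, reject) => {
        const save = spawn('docker', ['save', image]);
        const gzip = spawn('gzip', []);
        save.stdout.pipe(gzip.stdin);                   // docker save -> gzip
        gzip.stdout.pipe(fs.createWriteStream(target)); // gzip -> tarball on disk
        gzip.on('close', (code) => code === 0 ? resolve() : reject(code));
      });
    }

    // e.g. saveImage('taco:2.0.0', 'build/taco_image.tar.gz');
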
resolve() : reject(code)); }); } -function dockerRemoveImages() { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); +function dockerRemoveImages(productName) { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve) => { + return new Promise((resolve, reject) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code === 0) { + if (code == 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -290,20 +190,17 @@ function createQuietTerminalAdapter() { * runs yo internally * @param generator * @param options - * @param {string} cwd - * @param {string[]|string} args + * @param cwd */ -function yo(generator, options, cwd, args) { +function yo(generator, options, cwd) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - const _args = Array.isArray(args) ? args.join(' ') : args || ''; + yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.lookup(() => { - yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); - }); + yeomanEnv.run('phovea:' + generator, options, resolve); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -316,41 +213,44 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; - - return yo('clone-repo', { - branch: p.branch, - extras: '--depth 1', - dir: p.name, - cwd - }, cwd, toRepoUrlWithUser(p.repo)); // pass repo url as argument + console.log(cwd, chalk.blue(`running git clone --depth 1 -b ${p.branch} ${toRepoUrl(p.repo)} ${p.name}`)); + return spawn('git', ['clone', '--depth', '1', '-b', p.branch, toRepoUrlWithUser(p.repo), p.name], {cwd}); } function resolvePluginType(p, dir) { - if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { - p.pluginType = 'lib'; - p.isHybridType = false; - return; - } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } +function preBuild(p, dir) { + const hasAdditional = p.additional.length > 0; + let act = fs.emptyDirAsync(dir) + .then(() => cloneRepo(p, dir)) + .then(() => resolvePluginType(p, dir)); + if (hasAdditional) { + act = act + .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); + } + return act; +} + function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); + } else { + return Promise.resolve({}); } - return Promise.resolve({}); } function 
patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - // copy data from first service + //copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -366,79 +266,104 @@ function patchComposeFile(p, composeTemplate) { return r; } -function patchDockerfile(p, dockerFile) { - if (!p.baseImage) { - return null; - } - return fs.readFileAsync(dockerFile).then((content) => { - content = content.toString(); - // patch the Dockerfile by replacing the FROM statement - const r = /^\s*FROM (.+)\s*$/igm; - const fromImage = r.exec(content)[1]; - console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); - content = content.replace(r, `FROM ${p.baseImage}`); - return fs.writeFileAsync(dockerFile, content); - }); + +function postBuild(p, dir, buildInSubDir) { + return Promise.resolve(null) + .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) + .then(() => argv.skipSaveImage ? null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) + .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) + .then(mergeCompose); } -function patchWorkspace(p) { - // prepend docker_script in the workspace - if (fs.existsSync('./docker_script.sh')) { - console.log('patch workspace and prepend docker_script.sh'); - let content = fs.readFileSync('./docker_script.sh').toString(); - if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { - content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); +function buildWebApp(p, dir) { + console.log(dir, chalk.blue('building web application:'), p.label); + const name = p.name; + const hasAdditional = p.additional.length > 0; + let act = preBuild(p, dir); + //let act = Promise.resolve(null); + if (hasAdditional) { + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)) + .then(() => npm(dir, 'install')); + //test all modules + if (hasAdditional && !argv.skipTests) { + act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); } - fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); + act = act + .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); + } else { + act = act + .then(() => npm(dir + '/' + name, 'install')) + .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? 
':web' : ''}`)); } + return act + .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) + .then(postBuild.bind(null, p, dir, true)); +} - if (argv.injectVersion) { - const pkgfile = `${p.tmpDir}/${p.name}/package.json`; - if (fs.existsSync(pkgfile)) { - const ppkg = require(pkgfile); - ppkg.version = pkg.version; - fs.writeJSONSync(pkgfile, ppkg); - } else { - console.warn('cannot inject version, main package.json not found'); - } +function buildServerApp(p, dir) { + console.log(dir, chalk.blue('building service package:'), p.label); + const name = p.name; + + let act = preBuild(p, dir); + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)); + + if (!argv.skipTests) { + act = act + .then(() => console.log(chalk.yellow('create test environment'))) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); } - // inject extra phovea.js - if (fs.existsSync('./phovea.js')) { - console.log('patch workspace and add workspace phovea.js'); - let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); - fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); - - registry += `\n\n - import {register} from 'phovea_core/src/plugin'; - register('__product',require('./phovea.js')); - `; - fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); + act = act + .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) + .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? ':python' : ''}`)))); + + //copy all together + act = act + .then(() => fs.ensureDirAsync(`${dir}/build/source`)) + .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); + + //copy data packages + act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); + //let act = Promise.resolve([]); + + //copy main deploy thing and create a docker out of it + return act + .then(() => fs.ensureDirAsync(`${dir}/deploy`)) + .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) + .then(postBuild.bind(null, p, dir, false)); +} + +function buildImpl(d, dir) { + switch (d.type) { + case 'static': + case 'web': + return buildWebApp(d, dir); + case 'api': + d.name = d.name || 'phovea_server'; + return buildServerApp(d, dir); + case 'service': + return buildServerApp(d, dir); + default: + console.error(chalk.red('unknown product type: ' + d.type)); + return Promise.resolve(null); } } function mergeCompose(composePartials) { let dockerCompose = {}; - composePartials.forEach((c) => mergeWith(dockerCompose, c)); + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
_.union(a, b) : undefined; + composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); return dockerCompose; } -function buildComposePartials(descs) { - const validDescs = descs.filter((d) => !d.error); - - // merge a big compose file including all - return Promise.all(validDescs.map((p) => { - return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) - .then((partials) => { - p.composePartial = mergeCompose(partials); - }); - })); -} - -function buildCompose(descs, dockerComposePatch) { +function buildCompose(descs, composePartials) { console.log('create docker-compose.yml'); - - const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); + const dockerCompose = mergeCompose(composePartials); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -455,23 +380,6 @@ function buildCompose(descs, dockerComposePatch) { services[w].links.push(`${s.label}:${s.name}`); }); }); - - if (services._host) { - // inline _host to apis - const host = services._host; - delete services._host; - api.forEach((s) => { - services[s] = mergeCompose([host, services[s]]); - }); - } - - Object.keys(dockerComposePatch.services).forEach((service) => { - if (services[service] !== undefined) { - console.log(`patch generated docker-compose file for ${service}`); - mergeWith(services[service], dockerComposePatch.services[service]); - } - }); - const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -488,7 +396,7 @@ function pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { // push additional custom prefix without the version + if (argv.pushExtra) { //push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -501,214 +409,6 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } -function loadPatchFile() { - const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); - if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { - return {services: {}}; - } - const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); - const yaml = require('yamljs'); - const r = yaml.parse(content.toString()); - if (!r.services) { - r.services = {}; - } - return r; -} - -function fillDefaults(descs, dockerComposePatch) { - const singleService = descs.length === 1 && (argv.forceLabel === undefined); - - descs.forEach((d, i) => { - // default values - d.additional = d.additional || []; - d.data = d.data || []; - d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); - d.label = d.label || d.name; - d.symlink = d.symlink || null; // default value - d.image = d.image || `${productName}${singleService ? 
'' : `/${d.label}`}:${pkg.version}`; - // incorporate patch file - if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { - // use a different base image to build the item - d.baseImage = dockerComposePatch.services[d.label].image; - delete dockerComposePatch.services[d.label].image; - } - // include hint in the tmp directory which one is it - d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; - }); - - return descs; -} - -function asChain(steps, chain) { - if (chain.length === 0) { - return []; - } - const possibleSteps = Object.keys(steps); - - const callable = (c) => { - if (typeof c === 'function') { - return c; - } - - if (typeof c === 'string') { - // simple lookup - if (!possibleSteps.includes(c)) { - console.error('invalid step:', c); - throw new Error('invalid step: ' + c); - } - return callable(steps[c]); - } - - if (Array.isArray(c)) { // sequential sub started - const sub = c.map(callable); - return () => { - console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); - let step = Promise.resolve(); - for (const s of sub) { - step = step.then(s); - } - return step; - }; - } - // parallel = object - const sub = Object.keys(c).map((ci) => callable(c[ci])); - return () => { - console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); - return Promise.all(sub.map((d) => d())); // run sub lazy combined with all - }; - }; - return chain.map(callable); -} - -function runChain(chain, catchErrors) { - let start = null; - let step = new Promise((resolve) => { - start = resolve; - }); - - for (const c of chain) { - step = step.then(c); - } - - step.catch(catchErrors); - - return () => { - start(); // resolve first to start chain - return step; // return last result - }; -} - -function strObject(items) { - const obj = {}; - for (const item of items) { - obj[item] = item; - } - return obj; -} - -function buildDockerImage(p) { - const buildInSubDir = p.type === 'web' || p.type === 'static'; - let buildArgs = ''; - // pass through http_proxy, no_proxy, and https_proxy env variables - for (const key of Object.keys(process.env)) { - const lkey = key.toLowerCase(); - if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { - // pass through - buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; - } - } - - // patch the docker file with the with an optional given baseImage - return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) - // create the container image - .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) - // tag the container image - .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); -} - -function createWorkspace(p) { - return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) - .then(() => patchWorkspace(p)); -} - -function installWebDependencies(p) { - return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); -} - -function cleanUpWebDependencies(p) { - return fs.emptyDirAsync(p.additional.length > 0 ? 
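
The proxy handling in buildDockerImage amounts to forwarding http_proxy, https_proxy, and no_proxy as --build-arg flags; a minimal sketch:

    function proxyBuildArgs(envVars) {
      let buildArgs = '';
      for (const key of Object.keys(envVars)) {
        const lkey = key.toLowerCase();
        if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') {
          // forward the proxy setting into the docker build
          buildArgs += ` --build-arg ${lkey}='${envVars[key]}'`;
        }
      }
      return buildArgs;
    }

    console.log(proxyBuildArgs({HTTP_PROXY: 'http://proxy.example:3128'}));
    // -> " --build-arg http_proxy='http://proxy.example:3128'"
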
`${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); -} - -function resolvePluginTypes(p) { - if (p.pluginType) { - return Promise.resolve(); // already resolved - } - if (p.additional.length === 0) { - return resolvePluginType(p, p.tmpDir); - } - return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); -} - -function testWebAdditionals(p) { - return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); -} - -function buildWeb(p) { - const hasAdditional = p.additional.length > 0; - - let step; - if (hasAdditional) { - step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); - } else { - step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); - } - // move to target directory - return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); -} - -function installPythonTestDependencies(p) { - console.log(chalk.yellow('create test environment')); - return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); -} - -function buildServer(p) { - let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); - for (const pi of p.additional) { - act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? ':python' : ''}`)); - } - - // copy all together - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); - - // copy main deploy thing and create a docker out of it - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); - - return act; -} - -function downloadServerDataFiles(p) { - if (!argv.serial) { - return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); - } - // serial - let act = Promise.resolve(); - for (const d of p.data) { - act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); - } - return act; -} - -function cleanWorkspace(descs) { - console.log(chalk.yellow('clean workspace')); - return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); -} - if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -716,182 +416,59 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { + // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const dockerComposePatch = loadPatchFile(); - const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); - - if (fs.existsSync('.yo-rc.json')) { - fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); - } - fs.ensureDirSync('build'); - - const cleanUp = () => { - if (fs.existsSync('.yo-rc_tmp.json')) { - fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); - } - }; - - const catchProductBuild = (p, act) => { - // no chaining to keep error - act.catch((error) => { - p.error = error; - console.error('ERROR building ', p.name, error); - }); - return act; - }; - - const steps = { - clean: () => 
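
The steps registry assembled here maps step names to promise factories which a chain then executes in order; the core pattern, reduced to a sketch with illustrative step names:

    const steps = {
      clean: () => Promise.resolve(console.log('clean workspace')),
      build: () => Promise.resolve(console.log('build product')),
      push: () => Promise.resolve(console.log('push images'))
    };

    // run the named steps strictly one after another
    function runNamedChain(chain) {
      return chain.reduce((prev, name) => prev.then(steps[name]), Promise.resolve());
    }

    runNamedChain(['clean', 'build', 'push']);
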
cleanWorkspace(descs), - prune: dockerRemoveImages, - compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), - push: () => pushImages(descs.filter((d) => !d.error).map((d) => d.image)), - summary: () => { + const descs = require('./phovea_product.json'); + const singleService = descs.length === 1; + const productName = pkg.name.replace('_product', ''); + + + fs.emptyDirAsync('build') + .then(dockerRemoveImages.bind(this, productName)) + // move my own .yo-rc.json to avoid a conflict + .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) + .then(() => { + const buildOne = (d, i) => { + d.additional = d.additional || []; //default values + d.data = d.data || []; + d.name = d.name || fromRepoUrl(d.repo); + d.label = d.label || d.name; + if (singleService) { + d.image = `${productName}:${pkg.version}`; + } else { + d.image = `${productName}/${d.label}:${pkg.version}`; + } + let wait = buildImpl(d, './tmp' + i); + wait.catch((error) => { + d.error = error; + console.error('ERROR building ', d, error); + }); + return wait; + }; + if (argv.serial) { + let r = Promise.resolve([]); + for (let i = 0; i < descs.length; ++i) { + r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); + } + return r; + } else { + return Promise.all(descs.map(buildOne)); + } + }) + .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) + .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) + .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) + .then(() => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); - cleanUp(); if (anyErrors) { process.exit(1); } - } - }; - - const webTypes = ['static', 'web']; - const serverTypes = ['api', 'service']; - - const chainProducts = []; - for (let i = 0; i < descs.length; ++i) { - const p = descs[i]; - const suffix = p.name; - const hasAdditional = p.additional.length > 0; - const isWeb = webTypes.includes(p.type); - const isServer = serverTypes.includes(p.type); - - if (!isWeb && !isServer) { - console.error(chalk.red('unknown product type: ' + p.type)); - continue; - } - - fs.ensureDirSync(p.tmpDir); - - // clone repo - const subSteps = []; - steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); - subSteps.push(`clone:${suffix}`); - - if (hasAdditional) { - // clone extras - const cloneKeys = []; - for (const pi of p.additional) { - const key = `clone:${suffix}:${pi.name}`; - steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); - cloneKeys.push(key); - } - - if (argv.serial) { - subSteps.push(...cloneKeys); - } else { - subSteps.push(strObject(cloneKeys)); - } - } - - const needsWorskpace = (isWeb && hasAdditional) || isServer; - steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; - - if (isWeb) { - steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); - } else { // server - steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); - } - steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; - steps[`build:${suffix}`] = isWeb ? 
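
The --serial switch in this hunk boils down to promise chaining versus Promise.all; as a sketch:

    function buildAll(descs, buildOne, serial) {
      if (serial) {
        // one build after the other
        return descs.reduce((prev, d, i) => prev.then(() => buildOne(d, i)), Promise.resolve());
      }
      // all builds concurrently
      return Promise.all(descs.map((d, i) => buildOne(d, i)));
    }
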
() => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); - steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); - steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; - steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); - steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); - - subSteps.push(`prepare:${suffix}`); - subSteps.push(`install:${suffix}`); - if (!argv.skipTests) { - subSteps.push(`test:${suffix}`); - } - subSteps.push(`build:${suffix}`); - if (isServer && p.data.length > 0) { - subSteps.push(`data:${suffix}`); - } - if (isWeb) { - subSteps.push(`postbuild:${suffix}`); - } - subSteps.push(`image:${suffix}`); - if (!argv.skipSaveImage) { - subSteps.push(`save:${suffix}`); - } - - steps[`product:${suffix}`] = subSteps; - subSteps.name = `product:${suffix}`; - chainProducts.push(subSteps); - } - - // create some meta steps - { - const stepNames = Object.keys(steps); - for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { - const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); - if (sub.length <= 0) { - continue; - } - steps[meta] = argv.serial ? sub : strObject(sub); - } - } - - const chain = ['clean']; - - if (!argv.skipCleanUp) { - chain.push('prune'); - } - - if (argv.serial) { - chain.push(...chainProducts); // serially - } else { - const par = {}; - chainProducts.forEach((c) => { - par[c.name] = c; - }); - chain.push(par); // as object = parallel - } - // result of the promise is an array of partial docker compose files - - chain.push('compose'); - if (argv.pushTo) { - chain.push('push'); - } - chain.push('summary'); - - // XX. catch all error handling - const catchErrors = (error) => { + }).catch((error) => { console.error('ERROR extra building ', error); - // rename back - cleanUp(); process.exit(1); - }; - - if (argv.help) { - showHelp(steps, chain); - cleanUp(); - process.exit(0); - } - - if (argv._.length > 0) { - // explicit chain replace computed one - chain.splice(0, chain.length, ...argv._); - } - - console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); - const toExecute = asChain(steps, chain); - const launch = runChain(toExecute, catchErrors); - if (!argv.dryRun) { - launch(); - } + }); } From 7cd4e52c584a617282a203f84b6233e835ad04bc Mon Sep 17 00:00:00 2001 From: Anita Steiner Date: Thu, 9 Jan 2020 17:20:33 +0100 Subject: [PATCH 19/38] Update build.js --- build.js | 762 ++++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 592 insertions(+), 170 deletions(-) diff --git a/build.js b/build.js index 66b036c..6b6e445 100644 --- a/build.js +++ b/build.js @@ -7,7 +7,9 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); +// see show help const argv = require('yargs-parser')(process.argv.slice(2)); + const quiet = argv.quiet !== undefined; const now = new Date(); @@ -15,32 +17,115 @@ const prefix = (n) => n < 10 ? 
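
The SNAPSHOT handling restored at the top of build.js derives a build id from the current UTC time and splices it into the package version; for illustration:

    const now = new Date();
    const pad = (n) => n < 10 ? '0' + n : n.toString();
    // note: getUTCMonth() is zero-based, so January shows up as '00'
    const buildId = `${now.getUTCFullYear()}${pad(now.getUTCMonth())}${pad(now.getUTCDate())}` +
      `-${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`;
    console.log('2.0.0-SNAPSHOT'.replace('SNAPSHOT', buildId));
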
('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); +const productName = pkg.name.replace('_product', ''); + +function showHelp(steps, chain) { + console.info(`node build.js -- step1 step2 +possible options: + * --quiet ... reduce log messages + * --serial ... build elements sequentially + * --skipTests ... skip tests + * --injectVersion ... injects the product version into the package.json of the built component + * --useSSH ... clone via ssh instead of https + * --skipCleanUp ... skip cleaning up old docker images + * --skipSaveImage ... skip saving the generated docker images + * --pushTo ... push docker images to the given registry + * --noDefaultTags ... don't push generated default tag <name>:<version> + * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop + * --forceLabel ... force to use the label even only a single service exists + * --dryRun ... just compute chain no execution + * --help ... show this help message +arguments: (starting with --!) optional list of steps to execute in the given order (expert mode) by default the default chain is executed + `); + + steps = Object.keys(steps); + const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); + const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); + + console.info('possible primary steps:\n ', primary.join('\n ')); + console.info('possible secondary steps:\n ', secondary.join('\n ')); + + console.info('default chain:\n', JSON.stringify(chain, null, ' ')); } +/** + * generates a repo url to clone depending on the argv.useSSH option + * @param {string} url the repo url either in git@ for https:// form + * @returns the clean repo url + */ function toRepoUrl(url) { + if (url.startsWith('git@')) { + if (argv.useSSH) { + return url; + } + // have an ssh url need an http url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `https://${m[3]}/${m[4]}.git`; + } + if (url.startsWith('http')) { + if (!argv.useSSH) { + return url; + } + // have a http url need an ssh url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `git@${m[2]}:${m[4]}.git`; + } + if (!url.includes('/')) { + url = `Caleydo/${url}`; + } if (argv.useSSH) { - return `git@github.com:${url}.git` + return `git@github.com:${url}.git`; } - return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`; + return `https://github.com/${url}.git`; } +/** + * guesses the credentials environment variable based on the given repository hostname + * @param {string} repo + */ +function guessUserName(repo) { + // extract the host + const host = repo.match(/:\/\/([^/]+)/)[1]; + const hostClean = host.replace(/\./g, '_').toUpperCase(); + // e.g. 
GITHUB_COM_CREDENTIALS + const envVar = process.env[`${hostClean}_CREDENTIALS`]; + if (envVar) { + return envVar; + } + return process.env.PHOVEA_GITHUB_CREDENTIALS; +} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; - if (repo.includes('git@github.com') || !username_and_password) { + if (repo.startsWith('git@')) { // ssh return repo; } - return repo.replace('://', `://${username_and_password}@`); + const usernameAndPassword = guessUserName(repo); + if (!usernameAndPassword) { // ssh or no user given + return repo; + } + return repo.replace('://', `://${usernameAndPassword}@`); } - function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/(.*)\.git/)[0] + return url.match(/\/([^/]+)\.git/)[0]; } return url.slice(url.lastIndexOf('/') + 1); } +/** + * deep merge with array union + * @param {*} target + * @param {*} source + */ +function mergeWith(target, source) { + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; + _.mergeWith(target, source, mergeArrayUnion); + return target; +} + function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -49,7 +134,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - const request = http.get(url, (response) => { + http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -72,11 +157,13 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - switch(desc.type) { - case 'url': - const destName = toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); - case 'repo': + desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); + switch (desc.type) { + case 'url': { + desc.name = desc.name || toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); + } + case 'repo': { desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -85,6 +172,7 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); + } default: console.error('unknown data type:', desc.type); return null; @@ -96,25 +184,36 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options + * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); + const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? 
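
The spawn wrapper restored here promisifies child_process.spawn and, in quiet mode, captures output so it can be replayed on failure; a standalone sketch:

    const childProcess = require('child_process');

    function run(cmd, args, opts = {}) {
      return new Promise((resolve, reject) => {
        const p = childProcess.spawn(cmd, args, Object.assign({stdio: ['ignore', 'pipe', 'pipe']}, opts));
        const out = [];
        p.stdout.on('data', (chunk) => out.push(chunk));
        p.stderr.on('data', (chunk) => out.push(chunk));
        p.on('close', (code) => {
          if (code === 0) {
            resolve(code);
          } else {
            console.log(out.join('')); // replay captured output on failure
            reject(new Error(`${cmd} failed with status code ${code}`));
          }
        });
      });
    }

    // e.g. run('git', ['--version']).then(console.log);
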
['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); + const out = []; + if (p.stdout) { + p.stdout.on('data', (chunk) => out.push(chunk)); + } + if (p.stderr) { + p.stderr.on('data', (chunk) => out.push(chunk)); + } p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - reject(`${cmd} failed with status code ${code} ${signal}`); + if (args.quiet) { + // log output what has been captured + console.log(out.join('\n')); + } + reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); } }); }); } - /** * run npm with the given args * @param cwd working directory @@ -151,23 +250,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code == 0 ? resolve() : reject(code)); + p2.on('close', (code) => code === 0 ? resolve() : reject(code)); }); } -function dockerRemoveImages(productName) { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); +function dockerRemoveImages() { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code == 0) { + if (code === 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -190,17 +289,20 @@ function createQuietTerminalAdapter() { * runs yo internally * @param generator * @param options - * @param cwd + * @param {string} cwd + * @param {string[]|string} args */ -function yo(generator, options, cwd) { +function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); + const _args = Array.isArray(args) ? 
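
The yo() helper drives Yeoman programmatically through yeoman-environment (lookup, then run); reduced to its essentials, assuming the 2.x callback-style API that this file already uses:

    const yeoman = require('yeoman-environment');

    function runGenerator(namespace, options, cwd, extraArgs = '') {
      const yeomanEnv = yeoman.createEnv([], {cwd});
      return new Promise((resolve, reject) => {
        try {
          // discover installed generators, then run the requested one
          yeomanEnv.lookup(() => {
            yeomanEnv.run(`${namespace} ${extraArgs}`.trim(), options, resolve);
          });
        } catch (e) {
          reject(e);
        }
      });
    }

    // e.g. runGenerator('phovea:workspace', {noAdditionals: true}, './tmp0_taco');
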
args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.run('phovea:' + generator, options, resolve); + yeomanEnv.lookup(() => { + yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + }); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -213,44 +315,41 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; - console.log(cwd, chalk.blue(`running git clone --depth 1 -b ${p.branch} ${toRepoUrl(p.repo)} ${p.name}`)); - return spawn('git', ['clone', '--depth', '1', '-b', p.branch, toRepoUrlWithUser(p.repo), p.name], {cwd}); + + return yo('clone-repo', { + branch: p.branch, + extras: '--depth 1', + dir: p.name, + cwd + }, cwd, toRepoUrlWithUser(p.repo)); // pass repo url as argument } function resolvePluginType(p, dir) { + if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { + p.pluginType = 'lib'; + p.isHybridType = false; + return; + } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } -function preBuild(p, dir) { - const hasAdditional = p.additional.length > 0; - let act = fs.emptyDirAsync(dir) - .then(() => cloneRepo(p, dir)) - .then(() => resolvePluginType(p, dir)); - if (hasAdditional) { - act = act - .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); - } - return act; -} - function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); - } else { - return Promise.resolve({}); } + return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -266,104 +365,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? 
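
The patchDockerfile helper below swaps the FROM line of a Dockerfile for an alternative base image; the same idea as a standalone sketch (this variant rewrites only the first FROM line):

    const fs = require('fs');

    function patchBaseImage(dockerfilePath, baseImage) {
      const content = fs.readFileSync(dockerfilePath, 'utf8');
      const from = /^\s*FROM (.+)\s*$/im;
      const match = from.exec(content);
      if (!match) {
        return; // no FROM line found, nothing to patch
      }
      console.log(`patching ${dockerfilePath}: FROM ${match[1]} -> ${baseImage}`);
      fs.writeFileSync(dockerfilePath, content.replace(from, `FROM ${baseImage}`));
    }
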
null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? ':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} - -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? 
':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -380,6 +454,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -396,7 +487,7 @@ function 
pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -409,6 +500,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? '' : `/${d.label}`}:${pkg.version}`; + // incorporate patch file + if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { + // use a different base image to build the item + d.baseImage = dockerComposePatch.services[d.label].image; + delete dockerComposePatch.services[d.label].image; + } + // include hint in the tmp directory which one is it + d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; + }); + + return descs; +} + +function asChain(steps, chain) { + if (chain.length === 0) { + return []; + } + const possibleSteps = Object.keys(steps); + + const callable = (c) => { + if (typeof c === 'function') { + return c; + } + + if (typeof c === 'string') { + // simple lookup + if (!possibleSteps.includes(c)) { + console.error('invalid step:', c); + throw new Error('invalid step: ' + c); + } + return callable(steps[c]); + } + + if (Array.isArray(c)) { // sequential sub started + const sub = c.map(callable); + return () => { + console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); + let step = Promise.resolve(); + for (const s of sub) { + step = step.then(s); + } + return step; + }; + } + // parallel = object + const sub = Object.keys(c).map((ci) => callable(c[ci])); + return () => { + console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); + return Promise.all(sub.map((d) => d())); // run sub lazy combined with all + }; + }; + return chain.map(callable); +} + +function runChain(chain, catchErrors) { + let start = null; + let step = new Promise((resolve) => { + start = resolve; + }); + + for (const c of chain) { + step = step.then(c); + } + + step.catch(catchErrors); + + return () => { + start(); // resolve first to start chain + return step; // return last result + }; +} + +function strObject(items) { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; +} + +function buildDockerImage(p) { + const buildInSubDir = p.type === 'web' || p.type === 'static'; + let buildArgs = ''; + // pass through http_proxy, no_proxy, and https_proxy env variables + for (const key of 
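
The tag computation in pushImages above pairs every image with a registry-prefixed default tag plus an optional extra tag that swaps the version suffix; as a sketch (the registry name is a placeholder):

    function computeTags(images, registry, pushExtra, noDefaultTags) {
      const tags = [];
      if (!noDefaultTags) {
        // default: <registry>/<name>:<version>
        tags.push(...images.map((image) => `${registry}/${image}`));
      }
      if (pushExtra) {
        // extra tag replacing the version suffix, e.g. --pushExtra=develop
        tags.push(...images.map((image) =>
          `${registry}/${image.substring(0, image.lastIndexOf(':'))}:${pushExtra}`));
      }
      return tags;
    }

    console.log(computeTags(['taco:2.0.0'], 'registry.example.com/caleydo', 'develop', false));
    // -> ['registry.example.com/caleydo/taco:2.0.0', 'registry.example.com/caleydo/taco:develop']
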
Object.keys(process.env)) { + const lkey = key.toLowerCase(); + if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { + // pass through + buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; + } + } + + // patch the docker file with the with an optional given baseImage + return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) + // create the container image + .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) + // tag the container image + .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); +} + +function createWorkspace(p) { + return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) + .then(() => patchWorkspace(p)); +} + +function installWebDependencies(p) { + return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); +} + +function cleanUpWebDependencies(p) { + return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); +} + +function resolvePluginTypes(p) { + if (p.pluginType) { + return Promise.resolve(); // already resolved + } + if (p.additional.length === 0) { + return resolvePluginType(p, p.tmpDir); + } + return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); +} + +function testWebAdditionals(p) { + return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); +} + +function buildWeb(p) { + const hasAdditional = p.additional.length > 0; + + let step; + if (hasAdditional) { + step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); + } else { + step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); + } + // move to target directory + return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); +} + +function installPythonTestDependencies(p) { + console.log(chalk.yellow('create test environment')); + return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); +} + +function buildServer(p) { + let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); + for (const pi of p.additional) { + act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? 
':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -416,59 +715,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => 
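// NOTE: with a hypothetical image `myprod/web:2.0.0` and a call such as
// `node build.js --pushTo=registry.example.com --pushExtra=develop`
// (registry name invented), this step tags and pushes both
// registry.example.com/myprod/web:2.0.0 and
// registry.example.com/myprod/web:develop; passing --noDefaultTags drops the
// versioned tag and keeps only the extra one.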
pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? 
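// NOTE: `asChain` treats arrays as sequential sub-chains and plain objects as
// parallel ones, so the only difference between the two shapes built here is
// scheduling. For two hypothetical products `a` and `b`:
//
//   --serial -> steps.build = ['build:a', 'build:b']    // one after another
//   default  -> steps.build = {'build:a': 'build:a',    // Promise.all(...)
//                              'build:b': 'build:b'}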
sub : strObject(sub); + } + } + + const chain = ['clean']; + + if (!argv.skipCleanUp) { + chain.push('prune'); + } + + if (argv.serial) { + chain.push(...chainProducts); // serially + } else { + const par = {}; + chainProducts.forEach((c) => { + par[c.name] = c; + }); + chain.push(par); // as object = parallel + } + // result of the promise is an array of partial docker compose files + + chain.push('compose'); + if (argv.pushTo) { + chain.push('push'); + } + chain.push('summary'); + + // XX. catch all error handling + const catchErrors = (error) => { console.error('ERROR extra building ', error); + // rename back + cleanUp(); process.exit(1); - }); + }; + + if (argv.help) { + showHelp(steps, chain); + cleanUp(); + process.exit(0); + } + + if (argv._.length > 0) { + // explicit chain replace computed one + chain.splice(0, chain.length, ...argv._); + } + + console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); + const toExecute = asChain(steps, chain); + const launch = runChain(toExecute, catchErrors); + if (!argv.dryRun) { + launch(); + } } From 365b0475d63ab724b347738c5c96fb3d9387a88b Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:14:00 +0100 Subject: [PATCH 20/38] remove Jenkinsfile and _.travis.yml_ --- .travis.yml | 36 ------------------------------------ Jenkinsfile | 46 ---------------------------------------------- 2 files changed, 82 deletions(-) delete mode 100644 .travis.yml delete mode 100644 Jenkinsfile diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index dd5dc36..0000000 --- a/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -language: node_js - -node_js: -- 12.13 - -services: -- docker - -before_install: -- export DISPLAY=:99.0 -- sh -e /etc/init.d/xvfb start -- if [[ `npm -v` != 3* ]]; then npm i -g npm@3; fi - -before_script: -- pip install --user awscli -- export PATH=$PATH:$HOME/.local/bin - -# build and push -#- $(aws ecr get-login --region eu-central-1) -#script: node build.js --skipTests --skipSaveImage --pushTo=$AWS_ECR_PREFIX --pushLatest --pushDaily - -script: node build.js --skipSaveImage - -deploy: - provider: releases - api_key: - secure: TK9/P34Bi3WuppiDrBCwVcn41yCBwmILaU8hXTBzUPbT7TbeFIwsC6/4CtH85Z+ZrUve4S5pTmWRNf2dQDxWw3uYu7+bJuemV2J1LHG76mognj+TNEiYxfLQUt3Gql4W7C7FcI4Rlx5/uMN9wY1wro8TWUBMwT6jjSrUWIvK3GXoojd5bHvJx07XpjWl9wCon4D0ruZiFoM2mdeP23lbc2GckETi32oEKswnQXxkMACmxbPzoWbvkxH4aK8Bt2Rj2sl2TbPhVkN6DAkHGkGAvLI+2/aRfG27+oo3OKsaDjbuGABct8TfZccJ970CbQ8kbnCjYxstvqkg1JWjF0W67sX/flBZZOEUA5l0OLWo6HqMGMxm7/lEQhIdPMsRmvXL+HVOxkMrB2dda58QzxVwiZp+rRqUaeabPZp8Kl5xodGrVxsBvxe6zAbJ5jCtCSumG6+kLyKI00/kYlghqQNrgUw0ZsYJlQ34h3lo/24QpaeyDpQoCkGWQgtgqiXGpeKSu7bCnOqIqAy3nbT9Utwj7K8gIasTG5idosEAz/THMampNbGDuyxxc340sYGNMg9Bhm1g2ILWRdtV470p5hwBtIDTKi3/PAizEO26+Wh0zI47Sg3ao57avcbCsTmzbZUeA5J4bojmchhJCHX8su9cSCGh/2fJA/1eBIgEvOQ8LNE= - file: build/* - on: - tags: true - -notifications: - slack: - secure: 
E8/1UIdHSczUbN+6i6gd1d5LM4vmLdwLQ30tpyjvnM0wvfDce76oPxLJAy240WJ5ybXRZUtNrttpVpt4tEXCy8aLFCmxD7s77rVloH+q1J8R/ptTFWZGhFGEujk1awEmVbzcWxJkV9/JENQaeGBKxwv8/EQwWwEkAb7p/+AJb9owmH88b3wUZUGHBWtbMiyyaF4Rm1Wg1stJB8Z1Ga7PRF4cqufTgcDdsCPVv9gAY+VxOIGqX/Vfuc9UWpUH8vq8lHUE7Inn5QS78kuFfSgLWga3H6Mu/Gko1XNlWk0QWWQBUvEZ6ZC6Wuo68KzvUjJHDTnx8WyfHue2JNHIslcX+eJq2WHLeEgM24VeNkILCGo/H/60NGHiSjrIv/Y9h6bQ9FDjo6TUyE4nbdPYN1RN9FQ5UbI9Y4Gi753H9mqnHWlEywBOzHxdZCAuz9Wh03CCF/blsvJ+Obbyo6Jrfe+g44jyi9kQdBNQ78qG6v4EXws8FiYao6x3PpgIwFix42Cpr+soAh5FpA3C1zHSAyZZpXF65/lrDl5yPNofK7Wy0B9bw+0I6Z/u7ZKFNVZXvYPGYvtUVcsALGBdmYc61+LCta36Po0KZseWVAlJj6QnOJDYzv0wvV/zsuf9A5KpYFGiqV9Q7zmtiO5FYF5sBy+lE7O9tHVO4O18IRndhRQgxhs= - on_success: change - on_failure: always diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index 44e9742..0000000 --- a/Jenkinsfile +++ /dev/null @@ -1,46 +0,0 @@ -node { - stage('Checkout') { - checkout scm - } - - stage('Before Install') { - def nodeHome = tool 'node-v7' - env.PATH="${env.PATH}:${nodeHome}/bin" - def dockerHome = tool 'docker' - env.PATH="${env.PATH}:${dockerHome}/bin" - } - - stage('Install') { - sh 'node -v' - sh 'npm --version' - sh 'docker --version' - sh 'npm install' - } - - stage('Build') { - try { - withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { - docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' - } - } - } - } - currentBuild.result = "SUCCESS" - } catch (e) { - // if any exception occurs, mark the build as failed - currentBuild.result = 'FAILURE' - throw e - } finally { - // always clean up - sh 'npm prune' - sh 'rm node_modules -rf' - } - } - - stage('Post Build') { - archiveArtifacts artifacts: 'build/*' - } -} From 3bca41314f0ceb0a499e02ad959bf08837277b94 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:14:20 +0100 Subject: [PATCH 21/38] use `circleci/python:3.7-buster-node-browsers` as Docker image --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a0e689e..2cc66a5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,7 +3,7 @@ jobs: build: working_directory: ~/phovea docker: - - image: caleydo/phovea_circleci_python:v3.0 + - image: circleci/python:3.7-buster-node-browsers # for node version see Dockerfile on https://hub.docker.com/r/circleci/python steps: - checkout - setup_remote_docker From 6192222340836d8bd35a99a344ffd87fc47926ad Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:14:31 +0100 Subject: [PATCH 22/38] update awscli version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2cc66a5..a555232 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,15 +30,15 @@ jobs: name: Show installed npm dependencies command: npm list --depth=1 || true - restore_cache: - key: awscli-1.11.113 + key: awscli-1.16.312 - run: name: Install AWS CLI command: | virtualenv ~/venv . 
~/venv/bin/activate - pip install awscli==1.11.113 + pip install awscli==1.16.312 - save_cache: - key: awscli-1.11.113 + key: awscli-1.16.312 paths: - ~/venv - run: @@ -79,7 +79,7 @@ jobs: export AWS_DEFAULT_REGION=eu-central-1 baseName=${CIRCLE_PROJECT_REPONAME%_product} # list repos filter to just the one of this product and delete untagged ones - aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done + aws ecr describe-repositories --output text | cut -f6 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done - deploy: name: Restart AWS task # assumes the task definition is called - command: | From e07418171d5b89735bef2aa9a0ebd79c2eaa33fb Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:16:51 +0100 Subject: [PATCH 23/38] revert changes for build.js --- build.js | 763 +++++++++++++------------------------------------------ 1 file changed, 170 insertions(+), 593 deletions(-) diff --git a/build.js b/build.js index f6283d2..66b036c 100644 --- a/build.js +++ b/build.js @@ -7,9 +7,7 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); -// see show help const argv = require('yargs-parser')(process.argv.slice(2)); - const quiet = argv.quiet !== undefined; const now = new Date(); @@ -17,116 +15,32 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); -const productName = pkg.name.replace('_product', ''); - -function showHelp(steps, chain) { - console.info(`node build.js -- step1 step2 -possible options: - * --quiet ... reduce log messages - * --serial ... build elements sequentially - * --skipTests ... skip tests - * --injectVersion ... injects the product version into the package.json of the built component - * --useSSH ... clone via ssh instead of https - * --skipCleanUp ... skip cleaning up old docker images - * --skipSaveImage ... skip saving the generated docker images - * --pushTo ... push docker images to the given registry - * --noDefaultTags ... don't push generated default tag : - * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop - * --forceLabel ... force to use the label even only a single service exists - * --dryRun ... just compute chain no execution - * --help ... show this help message - -arguments: (starting with --!) 
optional list of steps to execute in the given order (expert mode) by default the default chain is executed - `); - - steps = Object.keys(steps); - const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); - const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); - - console.info('possible primary steps:\n ', primary.join('\n ')); - console.info('possible secondary steps:\n ', secondary.join('\n ')); - - console.info('default chain:\n', JSON.stringify(chain, null, ' ')); -} -/** - * generates a repo url to clone depending on the argv.useSSH option - * @param {string} url the repo url either in git@ for https:// form - * @returns the clean repo url - */ function toRepoUrl(url) { - if (url.startsWith('git@')) { - if (argv.useSSH) { - return url; - } - // have an ssh url need an http url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `https://${m[3]}/${m[4]}.git`; - } - if (url.startsWith('http')) { - if (!argv.useSSH) { - return url; - } - // have a http url need an ssh url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `git@${m[2]}:${m[4]}.git`; - } - if (!url.includes('/')) { - url = `Caleydo/${url}`; - } if (argv.useSSH) { - return `git@github.com:${url}.git`; + return `git@github.com:${url}.git` } - return `https://github.com/${url}.git`; + return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`; } -/** - * guesses the credentials environment variable based on the given repository hostname - * @param {string} repo - */ -function guessUserName(repo) { - // extract the host - const host = repo.match(/:\/\/([^/]+)/)[1]; - const hostClean = host.replace(/\./g, '_').toUpperCase(); - // e.g. GITHUB_COM_CREDENTIALS - const envVar = process.env[`${hostClean}_CREDENTIALS`]; - if (envVar) { - return envVar; - } - return process.env.PHOVEA_GITHUB_CREDENTIALS; -} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - if (repo.startsWith('git@')) { // ssh + const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; + if (repo.includes('git@github.com') || !username_and_password) { return repo; } - const usernameAndPassword = guessUserName(repo); - if (!usernameAndPassword) { // ssh or no user given - return repo; - } - return repo.replace('://', `://${usernameAndPassword}@`); + return repo.replace('://', `://${username_and_password}@`); } + function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/([^/]+)\.git/)[0]; + return url.match(/\/(.*)\.git/)[0] } return url.slice(url.lastIndexOf('/') + 1); } -/** - * deep merge with array union - * @param {*} target - * @param {*} source - */ -function mergeWith(target, source) { - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
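// NOTE: a lodash `mergeWith` customizer that returns `undefined` falls back to
// the default deep merge, so only arrays receive the union treatment here. A
// minimal illustration with invented values:
//
//   mergeWith({links: ['db'], ports: ['80:80']}, {links: ['db', 'api']})
//   // -> {links: ['db', 'api'], ports: ['80:80']}  (deduplicated union)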
_.union(a, b) : undefined; - _.mergeWith(target, source, mergeArrayUnion); - return target; -} - function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -135,7 +49,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - http.get(url, (response) => { + const request = http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -158,13 +72,11 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); - switch (desc.type) { - case 'url': { - desc.name = desc.name || toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); - } - case 'repo': { + switch(desc.type) { + case 'url': + const destName = toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); + case 'repo': desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -173,7 +85,6 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); - } default: console.error('unknown data type:', desc.type); return null; @@ -185,36 +96,25 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options - * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); - const out = []; - if (p.stdout) { - p.stdout.on('data', (chunk) => out.push(chunk)); - } - if (p.stderr) { - p.stderr.on('data', (chunk) => out.push(chunk)); - } + const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - if (args.quiet) { - // log output what has been captured - console.log(out.join('\n')); - } - reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); + reject(`${cmd} failed with status code ${code} ${signal}`); } }); }); } + /** * run npm with the given args * @param cwd working directory @@ -251,23 +151,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code === 0 ? resolve() : reject(code)); + p2.on('close', (code) => code == 0 ? 
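// NOTE: the two child processes above are wired stdout-to-stdin, with the gzip
// output streamed into the target file, i.e. roughly the shell pipeline
//
//   docker save <image> | gzip > <target>
//
// which is why only the exit code of the gzip end decides between resolve and
// reject here.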
resolve() : reject(code)); }); } -function dockerRemoveImages() { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); +function dockerRemoveImages(productName) { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve) => { + return new Promise((resolve, reject) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code === 0) { + if (code == 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -290,20 +190,17 @@ function createQuietTerminalAdapter() { * runs yo internally * @param generator * @param options - * @param {string} cwd - * @param {string[]|string} args + * @param cwd */ -function yo(generator, options, cwd, args) { +function yo(generator, options, cwd) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - const _args = Array.isArray(args) ? args.join(' ') : args || ''; + yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.lookup(() => { - yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); - }); + yeomanEnv.run('phovea:' + generator, options, resolve); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -316,41 +213,44 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; - - return yo('clone-repo', { - branch: p.branch, - extras: '--depth 1', - dir: p.name, - cwd - }, cwd, toRepoUrlWithUser(p.repo)); // pass repo url as argument + console.log(cwd, chalk.blue(`running git clone --depth 1 -b ${p.branch} ${toRepoUrl(p.repo)} ${p.name}`)); + return spawn('git', ['clone', '--depth', '1', '-b', p.branch, toRepoUrlWithUser(p.repo), p.name], {cwd}); } function resolvePluginType(p, dir) { - if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { - p.pluginType = 'lib'; - p.isHybridType = false; - return; - } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } +function preBuild(p, dir) { + const hasAdditional = p.additional.length > 0; + let act = fs.emptyDirAsync(dir) + .then(() => cloneRepo(p, dir)) + .then(() => resolvePluginType(p, dir)); + if (hasAdditional) { + act = act + .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); + } + return act; +} + function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); + } else { + return Promise.resolve({}); } - return Promise.resolve({}); } function 
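// NOTE: `loadComposeFile` above reads an optional per-plugin
// deploy/docker-compose.partial.yml, and `patchComposeFile` below keeps
// everything from that file's first service except its `build` section. A
// hypothetical partial (names invented for illustration):
//
//   services:
//     anything:         # only the first service entry is inspected
//       build: .        # dropped, the image is built by this script instead
//       environment:
//         - ENABLE_X=1  # kept and copied onto the generated service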
patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - // copy data from first service + //copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -366,79 +266,104 @@ function patchComposeFile(p, composeTemplate) { return r; } -function patchDockerfile(p, dockerFile) { - if (!p.baseImage) { - return null; - } - return fs.readFileAsync(dockerFile).then((content) => { - content = content.toString(); - // patch the Dockerfile by replacing the FROM statement - const r = /^\s*FROM (.+)\s*$/igm; - const fromImage = r.exec(content)[1]; - console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); - content = content.replace(r, `FROM ${p.baseImage}`); - return fs.writeFileAsync(dockerFile, content); - }); + +function postBuild(p, dir, buildInSubDir) { + return Promise.resolve(null) + .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) + .then(() => argv.skipSaveImage ? null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) + .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) + .then(mergeCompose); } -function patchWorkspace(p) { - // prepend docker_script in the workspace - if (fs.existsSync('./docker_script.sh')) { - console.log('patch workspace and prepend docker_script.sh'); - let content = fs.readFileSync('./docker_script.sh').toString(); - if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { - content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); +function buildWebApp(p, dir) { + console.log(dir, chalk.blue('building web application:'), p.label); + const name = p.name; + const hasAdditional = p.additional.length > 0; + let act = preBuild(p, dir); + //let act = Promise.resolve(null); + if (hasAdditional) { + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)) + .then(() => npm(dir, 'install')); + //test all modules + if (hasAdditional && !argv.skipTests) { + act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); } - fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); + act = act + .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); + } else { + act = act + .then(() => npm(dir + '/' + name, 'install')) + .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? 
':web' : ''}`)); } + return act + .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) + .then(postBuild.bind(null, p, dir, true)); +} - if (argv.injectVersion) { - const pkgfile = `${p.tmpDir}/${p.name}/package.json`; - if (fs.existsSync(pkgfile)) { - const ppkg = require(pkgfile); - ppkg.version = pkg.version; - fs.writeJSONSync(pkgfile, ppkg); - } else { - console.warn('cannot inject version, main package.json not found'); - } +function buildServerApp(p, dir) { + console.log(dir, chalk.blue('building service package:'), p.label); + const name = p.name; + + let act = preBuild(p, dir); + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)); + + if (!argv.skipTests) { + act = act + .then(() => console.log(chalk.yellow('create test environment'))) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); } - // inject extra phovea.js - if (fs.existsSync('./phovea.js')) { - console.log('patch workspace and add workspace phovea.js'); - let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); - fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); - - registry += `\n\n - import {register} from 'phovea_core/src/plugin'; - register('__product',require('./phovea.js')); - `; - fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); + act = act + .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) + .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? ':python' : ''}`)))); + + //copy all together + act = act + .then(() => fs.ensureDirAsync(`${dir}/build/source`)) + .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); + + //copy data packages + act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); + //let act = Promise.resolve([]); + + //copy main deploy thing and create a docker out of it + return act + .then(() => fs.ensureDirAsync(`${dir}/deploy`)) + .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) + .then(postBuild.bind(null, p, dir, false)); +} + +function buildImpl(d, dir) { + switch (d.type) { + case 'static': + case 'web': + return buildWebApp(d, dir); + case 'api': + d.name = d.name || 'phovea_server'; + return buildServerApp(d, dir); + case 'service': + return buildServerApp(d, dir); + default: + console.error(chalk.red('unknown product type: ' + d.type)); + return Promise.resolve(null); } } function mergeCompose(composePartials) { let dockerCompose = {}; - composePartials.forEach((c) => mergeWith(dockerCompose, c)); + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
_.union(a, b) : undefined; + composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); return dockerCompose; } -function buildComposePartials(descs) { - const validDescs = descs.filter((d) => !d.error); - - // merge a big compose file including all - return Promise.all(validDescs.map((p) => { - return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) - .then((partials) => { - p.composePartial = mergeCompose(partials); - }); - })); -} - -function buildCompose(descs, dockerComposePatch) { +function buildCompose(descs, composePartials) { console.log('create docker-compose.yml'); - - const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); + const dockerCompose = mergeCompose(composePartials); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -455,23 +380,6 @@ function buildCompose(descs, dockerComposePatch) { services[w].links.push(`${s.label}:${s.name}`); }); }); - - if (services._host) { - // inline _host to apis - const host = services._host; - delete services._host; - api.forEach((s) => { - services[s] = mergeCompose([host, services[s]]); - }); - } - - Object.keys(dockerComposePatch.services).forEach((service) => { - if (services[service] !== undefined) { - console.log(`patch generated docker-compose file for ${service}`); - mergeWith(services[service], dockerComposePatch.services[service]); - } - }); - const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -488,7 +396,7 @@ function pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { // push additional custom prefix without the version + if (argv.pushExtra) { //push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -501,214 +409,6 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } -function loadPatchFile() { - const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); - if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { - return {services: {}}; - } - const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); - const yaml = require('yamljs'); - const r = yaml.parse(content.toString()); - if (!r.services) { - r.services = {}; - } - return r; -} - -function fillDefaults(descs, dockerComposePatch) { - const singleService = descs.length === 1 && (argv.forceLabel === undefined); - - descs.forEach((d, i) => { - // default values - d.additional = d.additional || []; - d.data = d.data || []; - d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); - d.label = d.label || d.name; - d.symlink = d.symlink || null; // default value - d.image = d.image || `${productName}${singleService ? 
'' : `/${d.label}`}:${pkg.version}`; - // incorporate patch file - if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { - // use a different base image to build the item - d.baseImage = dockerComposePatch.services[d.label].image; - delete dockerComposePatch.services[d.label].image; - } - // include hint in the tmp directory which one is it - d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; - }); - - return descs; -} - -function asChain(steps, chain) { - if (chain.length === 0) { - return []; - } - const possibleSteps = Object.keys(steps); - - const callable = (c) => { - if (typeof c === 'function') { - return c; - } - - if (typeof c === 'string') { - // simple lookup - if (!possibleSteps.includes(c)) { - console.error('invalid step:', c); - throw new Error('invalid step: ' + c); - } - return callable(steps[c]); - } - - if (Array.isArray(c)) { // sequential sub started - const sub = c.map(callable); - return () => { - console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); - let step = Promise.resolve(); - for (const s of sub) { - step = step.then(s); - } - return step; - }; - } - // parallel = object - const sub = Object.keys(c).map((ci) => callable(c[ci])); - return () => { - console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); - return Promise.all(sub.map((d) => d())); // run sub lazy combined with all - }; - }; - return chain.map(callable); -} - -function runChain(chain, catchErrors) { - let start = null; - let step = new Promise((resolve) => { - start = resolve; - }); - - for (const c of chain) { - step = step.then(c); - } - - step.catch(catchErrors); - - return () => { - start(); // resolve first to start chain - return step; // return last result - }; -} - -function strObject(items) { - const obj = {}; - for (const item of items) { - obj[item] = item; - } - return obj; -} - -function buildDockerImage(p) { - const buildInSubDir = p.type === 'web' || p.type === 'static'; - let buildArgs = ''; - // pass through http_proxy, no_proxy, and https_proxy env variables - for (const key of Object.keys(process.env)) { - const lkey = key.toLowerCase(); - if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { - // pass through - buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; - } - } - - // patch the docker file with the with an optional given baseImage - return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) - // create the container image - .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) - // tag the container image - .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); -} - -function createWorkspace(p) { - return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) - .then(() => patchWorkspace(p)); -} - -function installWebDependencies(p) { - return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); -} - -function cleanUpWebDependencies(p) { - return fs.emptyDirAsync(p.additional.length > 0 ? 
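// NOTE: the recurring `additional.length > 0` switch encodes two checkout
// layouts (directory names invented): with extra plugins the clone becomes a
// generated workspace with one shared node_modules at the top; without them
// the single repository is built in place:
//
//   tmp0_myapp/              vs.   tmp0_myapp/
//     node_modules/                  myapp/
//     myapp/                           node_modules/
//     extra_plugin/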
`${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); -} - -function resolvePluginTypes(p) { - if (p.pluginType) { - return Promise.resolve(); // already resolved - } - if (p.additional.length === 0) { - return resolvePluginType(p, p.tmpDir); - } - return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); -} - -function testWebAdditionals(p) { - return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); -} - -function buildWeb(p) { - const hasAdditional = p.additional.length > 0; - - let step; - if (hasAdditional) { - step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); - } else { - step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); - } - // move to target directory - return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); -} - -function installPythonTestDependencies(p) { - console.log(chalk.yellow('create test environment')); - return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); -} - -function buildServer(p) { - let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); - for (const pi of p.additional) { - act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? ':python' : ''}`)); - } - - // copy all together - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); - - // copy main deploy thing and create a docker out of it - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); - - return act; -} - -function downloadServerDataFiles(p) { - if (!argv.serial) { - return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); - } - // serial - let act = Promise.resolve(); - for (const d of p.data) { - act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); - } - return act; -} - -function cleanWorkspace(descs) { - console.log(chalk.yellow('clean workspace')); - return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); -} - if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -716,182 +416,59 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { + // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const dockerComposePatch = loadPatchFile(); - const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); - - if (fs.existsSync('.yo-rc.json')) { - fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); - } - fs.ensureDirSync('build'); - - const cleanUp = () => { - if (fs.existsSync('.yo-rc_tmp.json')) { - fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); - } - }; - - const catchProductBuild = (p, act) => { - // no chaining to keep error - act.catch((error) => { - p.error = error; - console.error('ERROR building ', p.name, error); - }); - return act; - }; - - const steps = { - clean: () => 
cleanWorkspace(descs), - prune: dockerRemoveImages, - compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), - push: () => pushImages(descs.filter((d) => !d.error).map((d) => d.image)), - summary: () => { + const descs = require('./phovea_product.json'); + const singleService = descs.length === 1; + const productName = pkg.name.replace('_product', ''); + + + fs.emptyDirAsync('build') + .then(dockerRemoveImages.bind(this, productName)) + // move my own .yo-rc.json to avoid a conflict + .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) + .then(() => { + const buildOne = (d, i) => { + d.additional = d.additional || []; //default values + d.data = d.data || []; + d.name = d.name || fromRepoUrl(d.repo); + d.label = d.label || d.name; + if (singleService) { + d.image = `${productName}:${pkg.version}`; + } else { + d.image = `${productName}/${d.label}:${pkg.version}`; + } + let wait = buildImpl(d, './tmp' + i); + wait.catch((error) => { + d.error = error; + console.error('ERROR building ', d, error); + }); + return wait; + }; + if (argv.serial) { + let r = Promise.resolve([]); + for (let i = 0; i < descs.length; ++i) { + r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); + } + return r; + } else { + return Promise.all(descs.map(buildOne)); + } + }) + .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) + .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) + .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) + .then(() => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); - cleanUp(); if (anyErrors) { process.exit(1); } - } - }; - - const webTypes = ['static', 'web']; - const serverTypes = ['api', 'service']; - - const chainProducts = []; - for (let i = 0; i < descs.length; ++i) { - const p = descs[i]; - const suffix = p.name; - const hasAdditional = p.additional.length > 0; - const isWeb = webTypes.includes(p.type); - const isServer = serverTypes.includes(p.type); - - if (!isWeb && !isServer) { - console.error(chalk.red('unknown product type: ' + p.type)); - continue; - } - - fs.ensureDirSync(p.tmpDir); - - // clone repo - const subSteps = []; - steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); - subSteps.push(`clone:${suffix}`); - - if (hasAdditional) { - // clone extras - const cloneKeys = []; - for (const pi of p.additional) { - const key = `clone:${suffix}:${pi.name}`; - steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); - cloneKeys.push(key); - } - - if (argv.serial) { - subSteps.push(...cloneKeys); - } else { - subSteps.push(strObject(cloneKeys)); - } - } - - const needsWorskpace = (isWeb && hasAdditional) || isServer; - steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; - - if (isWeb) { - steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); - } else { // server - steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); - } - steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; - steps[`build:${suffix}`] = isWeb ? 
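// NOTE: for a product named e.g. `web` (name invented) this loop registers
// clone:web, prepare:web, install:web, test:web, build:web, data:web,
// postbuild:web, image:web and save:web, plus a product:web alias for the
// whole sub-chain, so a single product can be rebuilt explicitly:
//
//   node build.js -- clone:web install:web build:web image:web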
() => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); - steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); - steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; - steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); - steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); - - subSteps.push(`prepare:${suffix}`); - subSteps.push(`install:${suffix}`); - if (!argv.skipTests) { - subSteps.push(`test:${suffix}`); - } - subSteps.push(`build:${suffix}`); - if (isServer && p.data.length > 0) { - subSteps.push(`data:${suffix}`); - } - if (isWeb) { - subSteps.push(`postbuild:${suffix}`); - } - subSteps.push(`image:${suffix}`); - if (!argv.skipSaveImage) { - subSteps.push(`save:${suffix}`); - } - - steps[`product:${suffix}`] = subSteps; - subSteps.name = `product:${suffix}`; - chainProducts.push(subSteps); - } - - // create some meta steps - { - const stepNames = Object.keys(steps); - for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { - const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); - if (sub.length <= 0) { - continue; - } - steps[meta] = argv.serial ? sub : strObject(sub); - } - } - - const chain = ['clean']; - - if (!argv.skipCleanUp) { - chain.push('prune'); - } - - if (argv.serial) { - chain.push(...chainProducts); // serially - } else { - const par = {}; - chainProducts.forEach((c) => { - par[c.name] = c; - }); - chain.push(par); // as object = parallel - } - // result of the promise is an array of partial docker compose files - - chain.push('compose'); - if (argv.pushTo) { - chain.push('push'); - } - chain.push('summary'); - - // XX. 
catch all error handling - const catchErrors = (error) => { + }).catch((error) => { console.error('ERROR extra building ', error); - // rename back - cleanUp(); process.exit(1); - }; - - if (argv.help) { - showHelp(steps, chain); - cleanUp(); - process.exit(0); - } - - if (argv._.length > 0) { - // explicit chain replace computed one - chain.splice(0, chain.length, ...argv._); - } - - console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); - const toExecute = asChain(steps, chain); - const launch = runChain(toExecute, catchErrors); - if (!argv.dryRun) { - launch(); - } + }); } From f5691b1a94feec1a3c385aec80a943ae43d250d0 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:19:10 +0100 Subject: [PATCH 24/38] update phovea-generator --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index a8609f9..b90134b 100644 --- a/package.json +++ b/package.json @@ -33,12 +33,12 @@ "dependencies": { "bluebird": "3.4.6", "chalk": "1.1.3", - "generator-phovea": "^2.0.0", + "generator-phovea": "^3.1.0", "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", - "yeoman-environment": "1.6.6", + "yeoman-environment": "2.7.0", "fs-extra": "^1.0.0" } } From fde4ac196054e1f1ed631d7e2d7ffc878db00473 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:55:26 +0100 Subject: [PATCH 25/38] revert dependencies --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index b90134b..7242ab7 100644 --- a/package.json +++ b/package.json @@ -33,12 +33,12 @@ "dependencies": { "bluebird": "3.4.6", "chalk": "1.1.3", - "generator-phovea": "^3.1.0", + "fs-extra": "^1.0.0", + "generator-phovea": "^2.0.0", "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", - "yeoman-environment": "2.7.0", - "fs-extra": "^1.0.0" + "yeoman-environment": "1.6.6" } } From 4fed73ebf842954cca6095459b62b06ec762a8d8 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 10:21:37 +0100 Subject: [PATCH 26/38] run `yo phovea:update` --- build.js | 733 ++++++++++++++++++++++++++++--------- phovea_product.schema.json | 173 +++++++++ 2 files changed, 736 insertions(+), 170 deletions(-) create mode 100644 phovea_product.schema.json diff --git a/build.js b/build.js index 70f2085..f6283d2 100644 --- a/build.js +++ b/build.js @@ -7,7 +7,9 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); +// see show help const argv = require('yargs-parser')(process.argv.slice(2)); + const quiet = argv.quiet !== undefined; const now = new Date(); @@ -33,6 +35,7 @@ possible options: * --forceLabel ... force to use the label even only a single service exists * --dryRun ... just compute chain no execution * --help ... show this help message + arguments: (starting with --!) optional list of steps to execute in the given order (expert mode) by default the default chain is executed `); @@ -46,31 +49,84 @@ arguments: (starting with --!) 
optional list of steps to execute in the given or console.info('default chain:\n', JSON.stringify(chain, null, ' ')); } +/** + * generates a repo url to clone depending on the argv.useSSH option + * @param {string} url the repo url either in git@ for https:// form + * @returns the clean repo url + */ function toRepoUrl(url) { + if (url.startsWith('git@')) { + if (argv.useSSH) { + return url; + } + // have an ssh url need an http url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `https://${m[3]}/${m[4]}.git`; + } + if (url.startsWith('http')) { + if (!argv.useSSH) { + return url; + } + // have a http url need an ssh url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `git@${m[2]}:${m[4]}.git`; + } + if (!url.includes('/')) { + url = `Caleydo/${url}`; + } if (argv.useSSH) { - return `git@github.com:${url}.git` + return `git@github.com:${url}.git`; } - return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`; + return `https://github.com/${url}.git`; } +/** + * guesses the credentials environment variable based on the given repository hostname + * @param {string} repo + */ +function guessUserName(repo) { + // extract the host + const host = repo.match(/:\/\/([^/]+)/)[1]; + const hostClean = host.replace(/\./g, '_').toUpperCase(); + // e.g. GITHUB_COM_CREDENTIALS + const envVar = process.env[`${hostClean}_CREDENTIALS`]; + if (envVar) { + return envVar; + } + return process.env.PHOVEA_GITHUB_CREDENTIALS; +} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; - if (repo.includes('git@github.com') || !username_and_password) { + if (repo.startsWith('git@')) { // ssh return repo; } - return repo.replace('://', `://${username_and_password}@`); + const usernameAndPassword = guessUserName(repo); + if (!usernameAndPassword) { // ssh or no user given + return repo; + } + return repo.replace('://', `://${usernameAndPassword}@`); } - function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/(.*)\.git/)[0] + return url.match(/\/([^/]+)\.git/)[0]; } return url.slice(url.lastIndexOf('/') + 1); } +/** + * deep merge with array union + * @param {*} target + * @param {*} source + */ +function mergeWith(target, source) { + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; + _.mergeWith(target, source, mergeArrayUnion); + return target; +} + function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -79,7 +135,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - const request = http.get(url, (response) => { + http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -102,11 +158,13 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - switch(desc.type) { - case 'url': - const destName = toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); - case 'repo': + desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 
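// NOTE: together with the string shorthand handled further up, all three
// descriptor styles in the product's `data` array end up with an explicit
// type (file and repository names invented for illustration):
//
//   "my_data.csv"                              -> {type: 'url',  url: ...}
//   {"url": "https://example.com/my_data.csv"} -> {type: 'url',  ...}
//   {"repo": "Caleydo/my_data_repo"}           -> {type: 'repo', ...}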
'repo' : 'unknown')); + switch (desc.type) { + case 'url': { + desc.name = desc.name || toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); + } + case 'repo': { desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -115,6 +173,7 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); + } default: console.error('unknown data type:', desc.type); return null; @@ -126,25 +185,36 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options + * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); + const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); + const out = []; + if (p.stdout) { + p.stdout.on('data', (chunk) => out.push(chunk)); + } + if (p.stderr) { + p.stderr.on('data', (chunk) => out.push(chunk)); + } p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - reject(`${cmd} failed with status code ${code} ${signal}`); + if (args.quiet) { + // log output what has been captured + console.log(out.join('\n')); + } + reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); } }); }); } - /** * run npm with the given args * @param cwd working directory @@ -181,23 +251,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code == 0 ? resolve() : reject(code)); + p2.on('close', (code) => code === 0 ? 
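// NOTE: the quiet mode added to `spawn` further up buffers stdout/stderr so a
// failing command can replay its output; the failure branch tests
// `args.quiet`, however, and `args` holds the command arguments, so the
// replay presumably means `argv.quiet` and is effectively skipped as written.
// The promise contract is unchanged for callers such as:
//
//   npm(cwd, 'run dist').then(() => ...).catch((err) => ...)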
   });
 }

-function dockerRemoveImages(productName) {
-  console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`));
+function dockerRemoveImages() {
+  console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}' | xargs --no-run-if-empty docker rmi`));
   const spawn = require('child_process').spawn;
   const opts = {env};
-  return new Promise((resolve, reject) => {
+  return new Promise((resolve) => {
     const p = spawn('docker', ['images'], opts);
     const p2 = spawn('grep', [productName], opts);
     p.stdout.pipe(p2.stdin);
     const p3 = spawn('awk', ['{print $1":"$2}'], opts);
     p2.stdout.pipe(p3.stdin);
-    const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]});
+    const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]});
     p4.on('close', (code) => {
-      if (code == 0) {
+      if (code === 0) {
         resolve();
       } else {
         console.log('invalid error code, but continuing');
@@ -220,17 +290,20 @@ function createQuietTerminalAdapter() {
  * runs yo internally
  * @param generator
  * @param options
- * @param cwd
+ * @param {string} cwd
+ * @param {string[]|string} args
  */
-function yo(generator, options, cwd) {
+function yo(generator, options, cwd, args) {
   const yeoman = require('yeoman-environment');
   // call yo internally
   const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined);
-  yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator);
+  const _args = Array.isArray(args) ? args.join(' ') : args || '';
   return new Promise((resolve, reject) => {
     try {
       console.log(cwd, chalk.blue('running yo phovea:' + generator));
-      yeomanEnv.run('phovea:' + generator, options, resolve);
+      yeomanEnv.lookup(() => {
+        yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve);
+      });
     } catch (e) {
       console.error('error', e, e.stack);
       reject(e);
@@ -243,44 +316,41 @@ function cloneRepo(p, cwd) {
   p.name = p.name || fromRepoUrl(p.repo);
   p.repo = p.repo || `phovea/${p.name}`;
   p.branch = p.branch || 'master';
-  console.log(cwd, chalk.blue(`running git clone --depth 1 -b ${p.branch} ${toRepoUrl(p.repo)} ${p.name}`));
-  return spawn('git', ['clone', '--depth', '1', '-b', p.branch, toRepoUrlWithUser(p.repo), p.name], {cwd});
+
+  return yo('clone-repo', {
+    branch: p.branch,
+    extras: '--depth 1',
+    dir: p.name,
+    cwd
+  }, cwd, toRepoUrlWithUser(p.repo)); // pass the repo url as an argument
 }

 function resolvePluginType(p, dir) {
+  if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) {
+    p.pluginType = 'lib';
+    p.isHybridType = false;
+    return;
+  }
   return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => {
     p.pluginType = json['generator-phovea'].type;
     p.isHybridType = p.pluginType.includes('-');
   });
 }

-function preBuild(p, dir) {
-  const hasAdditional = p.additional.length > 0;
-  let act = fs.emptyDirAsync(dir)
-    .then(() => cloneRepo(p, dir))
-    .then(() => resolvePluginType(p, dir));
-  if (hasAdditional) {
-    act = act
-      .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir)))));
-  }
-  return act;
-}
-
 function loadComposeFile(dir, p) {
   const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`;
   if (fs.existsSync(composeFile)) {
     const yaml = require('yamljs');
     return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString()));
-  } else {
-    return Promise.resolve({});
   }
+  return Promise.resolve({});
 }

 function 
patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -296,104 +366,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? 
':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} - -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? ':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
_.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -410,6 +455,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -426,7 +488,7 @@ function pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -439,6 +501,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? 
'' : `/${d.label}`}:${pkg.version}`;
+    // incorporate patch file
+    if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) {
+      // use a different base image to build the item
+      d.baseImage = dockerComposePatch.services[d.label].image;
+      delete dockerComposePatch.services[d.label].image;
+    }
+    // include a hint in the tmp directory name as to which product it belongs
+    d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`;
+  });
+
+  return descs;
+}
+
+function asChain(steps, chain) {
+  if (chain.length === 0) {
+    return [];
+  }
+  const possibleSteps = Object.keys(steps);
+
+  const callable = (c) => {
+    if (typeof c === 'function') {
+      return c;
+    }
+
+    if (typeof c === 'string') {
+      // simple lookup
+      if (!possibleSteps.includes(c)) {
+        console.error('invalid step:', c);
+        throw new Error('invalid step: ' + c);
+      }
+      return callable(steps[c]);
+    }
+
+    if (Array.isArray(c)) { // sequential sub-chain
+      const sub = c.map(callable);
+      return () => {
+        console.log('run sequential sub chain: ', JSON.stringify(c, null, ' '));
+        let step = Promise.resolve();
+        for (const s of sub) {
+          step = step.then(s);
+        }
+        return step;
+      };
+    }
+    // parallel = object
+    const sub = Object.keys(c).map((ci) => callable(c[ci]));
+    return () => {
+      console.log('run parallel sub chain: ', JSON.stringify(c, null, ' '));
+      return Promise.all(sub.map((d) => d())); // run the lazy sub-steps combined via Promise.all
+    };
+  };
+  return chain.map(callable);
+}
+
+function runChain(chain, catchErrors) {
+  let start = null;
+  let step = new Promise((resolve) => {
+    start = resolve;
+  });
+
+  for (const c of chain) {
+    step = step.then(c);
+  }
+
+  step.catch(catchErrors);
+
+  return () => {
+    start(); // resolve first to start the chain
+    return step; // return the last result
+  };
+}
+
+function strObject(items) {
+  const obj = {};
+  for (const item of items) {
+    obj[item] = item;
+  }
+  return obj;
+}
+
+function buildDockerImage(p) {
+  const buildInSubDir = p.type === 'web' || p.type === 'static';
+  let buildArgs = '';
+  // pass through http_proxy, no_proxy, and https_proxy env variables
+  for (const key of Object.keys(process.env)) {
+    const lkey = key.toLowerCase();
+    if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') {
+      // pass through
+      buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`;
+    }
+  }
+
+  // patch the Dockerfile with an optionally given baseImage
+  return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`))
+    // create the container image
+    .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`))
+    // tag the container image
+    .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null);
+}
+
+function createWorkspace(p) {
+  return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir)
+    .then(() => patchWorkspace(p));
+}
+
+function installWebDependencies(p) {
+  return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install');
+}
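+
+// Illustration of the chain semantics defined by asChain/runChain above
+// (a sketch only; it assumes steps named clean, build:web, build:api, and compose exist):
+//   asChain(steps, ['clean', {web: 'build:web', api: 'build:api'}, 'compose'])
+// behaves like
+//   Promise.resolve()
+//     .then(steps.clean)
+//     .then(() => Promise.all([steps['build:web'](), steps['build:api']()]))
+//     .then(steps.compose);
+
+function cleanUpWebDependencies(p) {
+  return fs.emptyDirAsync(p.additional.length > 0 ? 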
`${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); +} + +function resolvePluginTypes(p) { + if (p.pluginType) { + return Promise.resolve(); // already resolved + } + if (p.additional.length === 0) { + return resolvePluginType(p, p.tmpDir); + } + return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); +} + +function testWebAdditionals(p) { + return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); +} + +function buildWeb(p) { + const hasAdditional = p.additional.length > 0; + + let step; + if (hasAdditional) { + step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); + } else { + step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); + } + // move to target directory + return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); +} + +function installPythonTestDependencies(p) { + console.log(chalk.yellow('create test environment')); + return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); +} + +function buildServer(p) { + let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); + for (const pi of p.additional) { + act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? ':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -446,59 +716,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } 
else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? 
() => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? sub : strObject(sub); + } + } + + const chain = ['clean']; + + if (!argv.skipCleanUp) { + chain.push('prune'); + } + + if (argv.serial) { + chain.push(...chainProducts); // serially + } else { + const par = {}; + chainProducts.forEach((c) => { + par[c.name] = c; + }); + chain.push(par); // as object = parallel + } + // result of the promise is an array of partial docker compose files + + chain.push('compose'); + if (argv.pushTo) { + chain.push('push'); + } + chain.push('summary'); + + // XX. 
catch all error handling
   const catchErrors = (error) => {
     console.error('ERROR extra building ', error);
+    // rename back
+    cleanUp();
     process.exit(1);
-  });
+  };
+
+  if (argv.help) {
+    showHelp(steps, chain);
+    cleanUp();
+    process.exit(0);
+  }
+
+  if (argv._.length > 0) {
+    // an explicit chain replaces the computed one
+    chain.splice(0, chain.length, ...argv._);
+  }
+
+  console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' '));
+  const toExecute = asChain(steps, chain);
+  const launch = runChain(toExecute, catchErrors);
+  if (!argv.dryRun) {
+    launch();
+  }
 }
diff --git a/phovea_product.schema.json b/phovea_product.schema.json
new file mode 100644
index 0000000..3ed1003
--- /dev/null
+++ b/phovea_product.schema.json
@@ -0,0 +1,173 @@
+{
+  "$id": "phovea_product",
+  "type": "array",
+  "definitions": {},
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "items": {
+    "$id": "phovea_product/items",
+    "type": "object",
+    "required": [
+      "type",
+      "repo"
+    ],
+    "properties": {
+      "type": {
+        "$id": "phovea_product/items/properties/type",
+        "type": "string",
+        "title": "the type of product to build",
+        "default": "",
+        "examples": [
+          "web",
+          "service",
+          "api"
+        ]
+      },
+      "name": {
+        "$id": "phovea_product/items/properties/name",
+        "type": "string",
+        "title": "name of the repo",
+        "default": "guessed from the repository",
+        "examples": [
+          "ordino",
+          "server"
+        ]
+      },
+      "label": {
+        "$id": "phovea_product/items/properties/label",
+        "type": "string",
+        "title": "product label and docker image label",
+        "default": "=name",
+        "examples": [
+          "ordino",
+          "server"
+        ]
+      },
+      "repo": {
+        "$id": "phovea_product/items/properties/repo",
+        "type": "string",
+        "title": "repository to use",
+        "description": "either a full git url or in the form owner/repo",
+        "default": "",
+        "examples": [
+          "Caleydo/ordino"
+        ]
+      },
+      "symlink": {
+        "$id": "phovea_product/items/properties/symlink",
+        "type": "string",
+        "title": "location relative to the product directory which contains the cloned repository",
+        "description": "Note symbolic links will be created to ensure the proper structure",
+        "default": "",
+        "examples": [
+          "../myclone"
+        ]
+      },
+      "branch": {
+        "$id": "phovea_product/items/properties/branch",
+        "type": "string",
+        "title": "the branch, tag, or sha1 commit to use",
+        "default": "master",
+        "examples": [
+          "master",
+          "v1.2.3",
+          "dc7486a472a987a2f6a38cd18b9b069487f1a4c8",
+          "develop"
+        ]
+      },
+      "additional": {
+        "$id": "phovea_product/items/properties/additional",
+        "type": "array",
+        "description": "additional repositories that should be included in the build",
+        "items": {
+          "$id": "phovea_product/items/properties/additional/items",
+          "type": "object",
+          "required": [
+            "name",
+            "repo"
+          ],
+          "properties": {
+            "name": {
+              "$id": "phovea_product/items/properties/additional/items/properties/name",
+              "type": "string",
+              "title": "name of the repo",
+              "default": "",
+              "examples": [
+                "phovea_core"
+              ]
+            },
+            "repo": {
+              "$id": "phovea_product/items/properties/additional/items/properties/repo",
+              "type": "string",
+              "title": "repository to use",
+              "description": "either a full git url or in the form owner/repo",
+              "default": "",
+              "examples": [
+                "Caleydo/ordino"
+              ]
+            },
+            "symlink": {
+              "$id": "phovea_product/items/properties/symlink",
+              "type": "string",
+              "title": "location relative to the product directory which contains the cloned repository",
+              "description": "Note symbolic links will be created to ensure the proper structure",
+              "default": "",
+              "examples": [
+                "../myclone"
+              ]
+            },
"branch": { + "$id": "phovea_product/items/properties/additional/items/properties/branch", + "type": "string", + "title": "the branch, tag, or sha1 commit to use", + "default": "master", + "examples": [ + "master", + "v1.2.3", + "dc7486a472a987a2f6a38cd18b9b069487f1a4c8", + "develop" + ] + } + } + } + }, + "data": { + "$id": "phovea_product/items/properties/data", + "type": "array", + "description": "a list of data files that should be included in /phovea/_data of the server build", + "items": { + "$id": "phovea_product/items/properties/data/items", + "type": "object", + "properties": { + "name": { + "$id": "phovea_product/items/properties/data/items/properties/name", + "type": "string", + "title": "name to store the file as", + "default": "derived from url or repo" + }, + "url": { + "$id": "phovea_product/items/properties/data/items/properties/url", + "type": "string", + "title": "url to a file to download", + "description": "if the file doesn't start with http... it is assumed that the file is relative to https://s3.eu-central-1.amazonaws.com/phovea-data-packages/", + "default": "", + "examples": [ + "test.h5", + "https://test.com/test.txt" + ] + }, + "repo": { + "$id": "phovea_product/items/properties/data/items/properties/repo", + "type": "string", + "title": "repository to clone that contains a data directory, the /data directory is cloned to /phovea/_data/", + "description": "either a full git url or in the form /", + "default": "", + "examples": [ + "Caleydo/ordino" + ] + } + } + } + } + } + } +} From 3ae31b90258827d6725abeee892625fd50495810 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 10:27:50 +0100 Subject: [PATCH 27/38] Fix prepare:taco step See commit https://github.com/Caleydo/gapminder_product/commit/49736c95364a9e95e7848dae10fc8e5845b1c487 --- build.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/build.js b/build.js index f6283d2..e5ed314 100644 --- a/build.js +++ b/build.js @@ -798,7 +798,9 @@ if (require.main === module) { } const needsWorskpace = (isWeb && hasAdditional) || isServer; - steps[`prepare:${suffix}`] = needsWorskpace ? 
() => catchProductBuild(p, createWorkspace(p)) : null; + if(needsWorskpace) { + steps[`prepare:${suffix}`] = () => catchProductBuild(p, createWorkspace(p)); + } if (isWeb) { steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); @@ -812,7 +814,9 @@ if (require.main === module) { steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); - subSteps.push(`prepare:${suffix}`); + if(needsWorskpace) { + subSteps.push(`prepare:${suffix}`); + } subSteps.push(`install:${suffix}`); if (!argv.skipTests) { subSteps.push(`test:${suffix}`); From 48dea82e8f395a03835e6c68588c28fc17e51751 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 10:39:46 +0100 Subject: [PATCH 28/38] replace `/` with underscore in branch name --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a555232..fa3d6f9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -64,7 +64,7 @@ jobs: awsTag="latest" ;; *) - awsTag="${CIRCLE_BRANCH}" + awsTag="${CIRCLE_BRANCH//\//_}" # replace `/` with `_` in branch name ;; esac echo "using tag: --${awsTag}--" @@ -87,7 +87,7 @@ jobs: export AWS_DEFAULT_REGION=eu-central-1 # cleanup name by removing the _product suffix baseName=${CIRCLE_PROJECT_REPONAME%_product} - awsFamily="${baseName}-${CIRCLE_BRANCH}" + awsFamily="${baseName}-${CIRCLE_BRANCH//\//_}" # replace `/` with `_` in branch name echo "awsFamily --${awsFamily}--" tasksExists=$(aws --output text ecs list-task-definitions --family-prefix ${awsFamily}) echo "existsTaskDefinition? --${tasksExists}--" From 1b9a258155b56f8789770009f234b2ab71d6f7b2 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 11:00:40 +0100 Subject: [PATCH 29/38] update dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 7242ab7..744fc3a 100644 --- a/package.json +++ b/package.json @@ -34,11 +34,11 @@ "bluebird": "3.4.6", "chalk": "1.1.3", "fs-extra": "^1.0.0", - "generator-phovea": "^2.0.0", + "generator-phovea": "github:phovea/generator-phovea#develop", "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", - "yeoman-environment": "1.6.6" + "yeoman-environment": "2.7.0" } } From 01a904727098ee5336cb1f617319e0bf907a35a2 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 11:09:11 +0100 Subject: [PATCH 30/38] remove taco server for testing --- phovea_product.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 13adc7c..c91cc66 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -12,11 +12,6 @@ "repo": "phovea/phovea_server", "branch": "develop", "additional": [ - { - "name": "taco_server", - "repo": "Caleydo/taco_server", - "branch": "develop" - }, { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", From 14ef3d01f24f50777edf2377e5e14e640a768ec2 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 11:13:32 +0100 Subject: [PATCH 31/38] remove phovea_data_hdf for testing --- phovea_product.json | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index c91cc66..e3401d3 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -11,13 +11,7 @@ "label": "taco_server", "repo": "phovea/phovea_server", 
"branch": "develop", - "additional": [ - { - "name": "phovea_data_hdf", - "repo": "phovea/phovea_data_hdf", - "branch": "develop" - } - ], + "additional": [], "data": [ { "type": "url", From 6083d5adbaa1ebea970a88f6ba6cefb147170b54 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 12:38:48 +0100 Subject: [PATCH 32/38] revert changes for testing and switch to branch with updated url --- phovea_product.json | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index e3401d3..8170f41 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,8 +10,19 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "develop", - "additional": [], + "branch": "stoiber/update_debian_url_in_Dockerfile", + "additional": [ + { + "name": "taco_server", + "repo": "Caleydo/taco_server", + "branch": "develop" + }, + { + "name": "phovea_data_hdf", + "repo": "phovea/phovea_data_hdf", + "branch": "develop" + } + ], "data": [ { "type": "url", From ef9842e88c3447f23379aafb8e3fa351b36b6e0c Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 13:01:46 +0100 Subject: [PATCH 33/38] switch to branch develop --- phovea_product.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/phovea_product.json b/phovea_product.json index 8170f41..13adc7c 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,7 +10,7 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "stoiber/update_debian_url_in_Dockerfile", + "branch": "develop", "additional": [ { "name": "taco_server", From 8bb9cc35be52d4839b1047d47d2949b849ddbd7c Mon Sep 17 00:00:00 2001 From: rumersdorfer <45141967+rumersdorfer@users.noreply.github.com> Date: Wed, 29 Jan 2020 15:39:38 +0100 Subject: [PATCH 34/38] add CODEOWNERS file --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..9836a90 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @thinkh From c2b5abb9783da53d39485cb3dcdbdf0b958b995b Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Thu, 30 Jan 2020 14:27:49 +0100 Subject: [PATCH 35/38] Switch to develop branch --- phovea_product.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index 9d19288..13adc7c 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,7 +10,7 @@ "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "v2.0.0", + "branch": "develop", "additional": [ { "name": "taco_server", @@ -20,7 +20,7 @@ { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "v2.0.0" + "branch": "develop" } ], "data": [ From 0656ee6d57b2fc6bc076d7011654c496709984fd Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Thu, 30 Jan 2020 14:31:08 +0100 Subject: [PATCH 36/38] Activate nightly develop build --- .circleci/config.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fa3d6f9..621b6b7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -103,16 +103,16 @@ jobs: fi workflows: version: 2 -# build-nightly: -# triggers: -# - schedule: -# cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.”, see: https://crontab.guru/#15_1_*_*_1-5 -# filters: -# branches: -# only: -# - develop -# jobs: 
-# - build + build-nightly: + triggers: + - schedule: + cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.”, see: https://crontab.guru/#15_1_*_*_1-5 + filters: + branches: + only: + - develop + jobs: + - build build-branch: jobs: - build: From 07dce88274dc0ade0e746ed6f578098cea996e91 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Thu, 30 Jan 2020 14:33:42 +0100 Subject: [PATCH 37/38] Add `--serial` flag for parallel builds --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 621b6b7..4af8614 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -68,7 +68,7 @@ jobs: ;; esac echo "using tag: --${awsTag}--" - node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo + node build.js --serial --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo - store_artifacts: path: build destination: build From f4a76457991e2a0da4118a0338a35dd6cd3c19af Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Thu, 30 Jan 2020 15:36:46 +0100 Subject: [PATCH 38/38] Prepeare release 3.0.0 --- package.json | 4 ++-- phovea_product.json | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 744fc3a..3e80a0c 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "taco_product", "description": "", "homepage": "https://phovea.caleydo.org", - "version": "0.0.1-SNAPSHOT", + "version": "3.0.0", "author": { "name": "The Caleydo Team", "email": "contact@caleydo.org", @@ -34,7 +34,7 @@ "bluebird": "3.4.6", "chalk": "1.1.3", "fs-extra": "^1.0.0", - "generator-phovea": "github:phovea/generator-phovea#develop", + "generator-phovea": "3.1.0", "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", diff --git a/phovea_product.json b/phovea_product.json index 13adc7c..14a4742 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -3,24 +3,24 @@ "type": "web", "label": "taco", "repo": "Caleydo/taco", - "branch": "develop", + "branch": "v3.0.0", "additional": [] }, { "type": "api", "label": "taco_server", "repo": "phovea/phovea_server", - "branch": "develop", + "branch": "v4.0.1", "additional": [ { "name": "taco_server", "repo": "Caleydo/taco_server", - "branch": "develop" + "branch": "v3.0.0" }, { "name": "phovea_data_hdf", "repo": "phovea/phovea_data_hdf", - "branch": "develop" + "branch": "v4.0.0" } ], "data": [