
Add support for `Dockerfile` (#98)

* add support for `Dockerfile`

* add dockerfile `EXPOSE` validation
Guillermo Rauch committed a23f147b48 into master, 9 years ago (committed by GitHub)
6 changed files:

 bin/now-deploy              |  80
 gulpfile.babel.js           |   2
 lib/get-files.js            |  48
 lib/index.js                | 144
 lib/utils/prompt-options.js |  30
 package.json                |   1

bin/now-deploy

@@ -3,6 +3,7 @@ import Progress from 'progress';
 import copy from '../lib/copy';
 import { resolve } from 'path';
 import login from '../lib/login';
+import { stat } from 'fs-promise';
 import * as cfg from '../lib/cfg';
 import { version } from '../../package';
 import Logger from '../lib/build-logger';
@@ -11,11 +12,12 @@ import chalk from 'chalk';
 import minimist from 'minimist';
 import Now from '../lib';
 import toHumanPath from '../lib/utils/to-human-path';
+import promptOptions from '../lib/utils/prompt-options';
 import ms from 'ms';
 import { handleError, error } from '../lib/error';

 const argv = minimist(process.argv.slice(2), {
-  boolean: ['help', 'version', 'debug', 'force', 'login', 'no-clipboard', 'forward-npm'],
+  boolean: ['help', 'version', 'debug', 'force', 'login', 'no-clipboard', 'forward-npm', 'docker', 'npm'],
   alias: {
     help: 'h',
     debug: 'd',
@@ -143,16 +145,92 @@ async function sync (token) {
     console.log(`> Deploying ${chalk.bold(toHumanPath(path))}`);
   }

+  try {
+    await stat(path);
+  } catch (err) {
+    error(`Could not read directory ${chalk.bold(path)}`);
+    process.exit(1);
+  }
+
+  let deploymentType, hasPackage, hasDockerfile;
+
+  if (argv.docker) {
+    if (debug) {
+      console.log(`> [debug] Forcing \`deploymentType\` = \`docker\``);
+    }
+    deploymentType = 'docker';
+  } else if (argv.npm) {
+    deploymentType = 'npm';
+  } else {
+    [hasPackage, hasDockerfile] = await Promise.all([
+      (async () => {
+        try {
+          await stat(resolve(path, 'package.json'));
+        } catch (err) {
+          return false;
+        }
+        return true;
+      })(),
+      (async () => {
+        try {
+          await stat(resolve(path, 'Dockerfile'));
+        } catch (err) {
+          return false;
+        }
+        return true;
+      })()
+    ]);
+
+    if (hasPackage && hasDockerfile) {
+      if (debug) console.log('[debug] multiple manifests found, disambiguating');
+
+      if (isTTY) {
+        try {
+          console.log(`> Two manifests found. Press [${chalk.bold('n')}] to deploy or re-run with --flag`);
+          deploymentType = await promptOptions([
+            ['npm', `${chalk.bold('package.json')}\t${chalk.gray(' --npm')} `],
+            ['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `]
+          ]);
+        } catch (err) {
+          error(err.message);
+          process.exit(1);
+        }
+      } else {
+        error('Ambiguous deployment (`package.json` and `Dockerfile` found). ' +
+          'Please supply `--npm` or `--docker` to disambiguate.');
+        process.exit(1);
+      }
+    } else if (hasPackage) {
+      if (debug) console.log('[debug] `package.json` found, assuming `deploymentType` = `npm`');
+      deploymentType = 'npm';
+    } else if (hasDockerfile) {
+      if (debug) console.log('[debug] `Dockerfile` found, assuming `deploymentType` = `docker`');
+      deploymentType = 'docker';
+    } else {
+      error(`Could not find a \`package.json\` or \`Dockerfile\` in ${chalk.bold(path)}`);
+      process.exit(1);
+    }
+  }
+
   const now = new Now(apiUrl, token, { debug });

   try {
     await now.create(path, {
+      deploymentType,
       forceNew,
       forceSync,
       forwardNpm: alwaysForwardNpm || forwardNpm,
       quiet
     });
   } catch (err) {
+    if (debug) console.log(`> [debug] error: ${err.stack}`);
     handleError(err);
     process.exit(1);
   }
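
The detection above follows a simple precedence: an explicit `--docker` or `--npm` flag wins; otherwise a lone manifest decides; two manifests trigger the interactive prompt (or an error when stdin is not a TTY). A minimal standalone sketch of that precedence; the `exists` helper and the function name are illustrative, not part of this commit:

    import { resolve } from 'path';
    import { stat } from 'fs-promise';

    // illustrative helper: true if the path can be stat'ed
    const exists = (p) => stat(p).then(() => true, () => false);

    // same precedence as above: flags > lone manifest > ambiguous/none
    async function detectDeploymentType (path, { docker, npm } = {}) {
      if (docker) return 'docker';
      if (npm) return 'npm';
      const [hasPackage, hasDockerfile] = await Promise.all([
        exists(resolve(path, 'package.json')),
        exists(resolve(path, 'Dockerfile'))
      ]);
      if (hasPackage && hasDockerfile) return null; // caller prompts or errors
      if (hasPackage) return 'npm';
      if (hasDockerfile) return 'docker';
      throw new Error('No `package.json` or `Dockerfile` found');
    }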

gulpfile.babel.js

@@ -32,7 +32,7 @@ gulp.task('enclose', ['compile'], (cb) => {
   ], cb);
 });

-gulp.task('watch-lib', () => gulp.watch('lib/*.js', ['compile-lib']));
+gulp.task('watch-lib', () => gulp.watch('lib/**/*.js', ['compile-lib']));
 gulp.task('watch-bin', () => gulp.watch('bin/*', ['compile-bin']));
 gulp.task('clean', () => del(['build']));
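
The deeper `lib/**/*.js` glob lets the watcher pick up files in the new `lib/utils/` directory introduced by `prompt-options.js` below.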

lib/get-files.js

@@ -8,7 +8,7 @@ import { stat, readdir, readFile } from 'fs-promise';
 /**
  * Returns a list of files in the given
  * directory that are subject to be
- * synchronized.
+ * synchronized for npm.
  *
  * @param {String} full path to directory
  * @param {String} contents of `package.json` to avoid lookup
@@ -18,26 +18,46 @@ import { stat, readdir, readFile } from 'fs-promise';
  * @return {Array} comprehensive list of paths to sync
  */

-export default async function getFiles (path, pkg, { limit = null, debug = false }) {
-  if (!pkg) {
+export default async function getFiles (path, pkg, {
+  deploymentType = 'npm',
+  limit = null,
+  debug = false
+}) {
+  if (!pkg && 'npm' === deploymentType) {
     const pkgPath = resolve(path, 'package.json');
     const pkgData = await readFile(pkgPath, 'utf8');
     pkg = JSON.parse(pkgData);
   }

-  let search = (pkg.files || ['.']).concat('package.json');
-  if (pkg.main) search = search.concat(pkg.main);
+  let search = (pkg ? pkg.files || ['.'] : []).concat(
+    'npm' === deploymentType
+      ? 'package.json'
+      : 'Dockerfile'
+  );
+
+  if ('npm' === deploymentType && pkg.main) {
+    search = search.concat(pkg.main);
+  }
+
   search = search.map((file) => asAbsolute(file, path));

   // compile list of ignored patterns and files
-  const npmIgnore = await maybeRead(resolve(path, '.npmignore'));
-  const gitIgnore = npmIgnore
-    ? ''
-    : (await maybeRead(resolve(path, '.gitignore')));
-
-  const ignored = unique(IGNORED
-    .concat(gitIgnore.split('\n').filter(invalidFilter))
-    .concat(npmIgnore.split('\n').filter(invalidFilter)))
-    .map((file) => resolve(path, file));
+  let ignored;
+  if ('npm' === deploymentType) {
+    const npmIgnore = await maybeRead(resolve(path, '.npmignore'));
+    const gitIgnore = npmIgnore
+      ? ''
+      : (await maybeRead(resolve(path, '.gitignore')));
+    ignored = unique(IGNORED
+      .concat(gitIgnore.split('\n').filter(invalidFilter))
+      .concat(npmIgnore.split('\n').filter(invalidFilter)));
+  } else {
+    const dockerIgnore = await maybeRead(resolve(path, '.dockerignore'));
+    ignored = unique(IGNORED
+      .concat(dockerIgnore.split('\n').filter(invalidFilter)));
+  }
+  ignored = ignored.map((file) => resolve(path, file));

   // get files
   return unique((await explode(search, ignored, { limit, debug })));
@@ -51,6 +71,8 @@ export default async function getFiles (path, pkg, { limit = null, debug = false
  * @return {Function} filter fn
  */

+// TODO: revisit this to support the entire
+// .dockerignore format like the `!` prefix
 const isIgnored = (file, ignored) => {
   return ignored.some((test) => {
     // test that the target file is not under
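
For docker deployments the ignore rules now come from `.dockerignore`, run through the same `invalidFilter` used for `.npmignore`/`.gitignore`. A rough sketch of what that line filtering amounts to; this `invalidFilter` is a stand-in that assumes the real one drops blank and comment lines:

    // stand-in for the module's `invalidFilter`: keep meaningful lines only
    const invalidFilter = (line) => '' !== line.trim() && '#' !== line.trim()[0];

    const dockerIgnore = ['node_modules', '# build output', 'dist', '', '.git'].join('\n');
    const patterns = dockerIgnore.split('\n').filter(invalidFilter);
    console.log(patterns); // ['node_modules', 'dist', '.git']

Per the TODO above, `!` negation patterns are not yet interpreted; they pass through as literal paths.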

lib/index.js

@@ -9,9 +9,10 @@ import EventEmitter from 'events';
 import { basename, resolve as resolvePath } from 'path';
 import { homedir } from 'os';
 import { parse as parseIni } from 'ini';
-import { stat, readFile } from 'fs-promise';
+import { readFile } from 'fs-promise';
 import resumer from 'resumer';
 import splitArray from 'split-array';
+import { parse as parseDockerfile } from 'docker-file-parser';

 // how many concurrent HTTP/2 stream uploads
 const MAX_CONCURRENT = 10;
@@ -30,47 +31,108 @@ export default class Now extends EventEmitter {
     this._onRetry = this._onRetry.bind(this);
   }

-  async create (path, { forceNew, forceSync, forwardNpm, quiet = false }) {
+  async create (path, {
+    quiet = false,
+    forceNew = false,
+    forceSync = false,
+    forwardNpm = false,
+    deploymentType = 'npm'
+  }) {
     this._path = path;

-    try {
-      await stat(path);
-    } catch (err) {
-      const e = new Error(`Could not read directory ${path}.`);
-      e.userError = true;
-      throw e;
-    }
-
-    let pkg;
-    try {
-      pkg = await readFile(resolvePath(path, 'package.json'));
-      pkg = JSON.parse(pkg);
-    } catch (err) {
-      const e = Error(`Failed to read JSON in "${path}/package.json"`);
-      e.userError = true;
-      throw e;
-    }
-
-    if (null == pkg.name || 'string' !== typeof pkg.name) {
-      const e = Error('Missing or invalid `name` in `package.json`.');
-      e.userError = true;
-      throw e;
-    }
-
-    if (!pkg.scripts || (!pkg.scripts.start && !pkg.scripts['now-start'])) {
-      const e = Error('Missing `start` (or `now-start`) script in `package.json`. ' +
-        'See: https://docs.npmjs.com/cli/start.');
-      e.userError = true;
-      throw e;
-    }
+    let pkg = {};
+    let name, description;
+
+    if ('npm' === deploymentType) {
+      try {
+        pkg = await readFile(resolvePath(path, 'package.json'));
+        pkg = JSON.parse(pkg);
+      } catch (err) {
+        const e = Error(`Failed to read JSON in "${path}/package.json"`);
+        e.userError = true;
+        throw e;
+      }
+
+      if (!pkg.scripts || (!pkg.scripts.start && !pkg.scripts['now-start'])) {
+        const e = Error('Missing `start` (or `now-start`) script in `package.json`. ' +
+          'See: https://docs.npmjs.com/cli/start.');
+        e.userError = true;
+        throw e;
+      }
+
+      if (null == pkg.name || 'string' !== typeof pkg.name) {
+        name = basename(path);
+        if (!quiet) console.log(`> No \`name\` in \`package.json\`, using ${chalk.bold(name)}`);
+      } else {
+        name = pkg.name;
+      }
+
+      description = pkg.description;
+    } else if ('docker' === deploymentType) {
+      let docker;
+      try {
+        const dockerfile = await readFile(resolvePath(path, 'Dockerfile'), 'utf8');
+        docker = parseDockerfile(dockerfile);
+      } catch (err) {
+        const e = Error(`Failed to parse "${path}/Dockerfile"`);
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.length) {
+        const e = Error('No commands found in `Dockerfile`');
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.some((cmd) => 'RUN' === cmd.name || 'CMD' === cmd.name)) {
+        const e = Error('No `RUN` or `CMD` found in `Dockerfile`. ' +
+          'See: https://docs.docker.com/engine/reference/builder/#/run');
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.some((cmd) => 'EXPOSE' === cmd.name)) {
+        const e = Error('No `EXPOSE` found in `Dockerfile`. A port must be supplied. ' +
+          'See: https://docs.docker.com/engine/reference/builder/#/expose');
+        e.userError = true;
+        throw e;
+      }
+
+      const labels = {};
+      docker
+        .filter((cmd) => 'LABEL' === cmd.name)
+        .forEach(({ args }) => {
+          for (let key in args) {
+            // unescape and convert into string
+            try {
+              labels[key] = JSON.parse(args[key]);
+            } catch (err) {
+              const e = Error(`Error parsing value for LABEL ${key} in \`Dockerfile\``);
+              e.userError = true;
+              throw e;
+            }
+          }
+        });
+
+      if (null == labels.name) {
+        name = basename(path);
+        if (!quiet) console.log(`> No \`name\` LABEL in \`Dockerfile\`, using ${chalk.bold(name)}`);
+      } else {
+        name = labels.name;
+      }
+
+      description = labels.description;
+    }

-    const nowProperties = pkg.now || {};
+    const nowProperties = pkg ? pkg.now || {} : {};

     forwardNpm = forwardNpm || nowProperties['forwardNpm'];

     // Read .npmrc
     let npmrc = {};
     let authToken;
-    if (forwardNpm) {
+    if ('npm' === deploymentType && forwardNpm) {
       try {
         npmrc = await readFile(resolvePath(path, '.npmrc'), 'utf8');
         npmrc = parseIni(npmrc);
@@ -91,7 +153,10 @@ export default class Now extends EventEmitter {
     }

     if (this._debug) console.time('> [debug] Getting files');
-    const files = await getFiles(path, pkg, { debug: this._debug });
+    const files = await getFiles(path, pkg, {
+      deploymentType,
+      debug: this._debug
+    });
     if (this._debug) console.timeEnd('> [debug] Getting files');

     if (this._debug) console.time('> [debug] Computing hashes');
@@ -109,17 +174,18 @@ export default class Now extends EventEmitter {
       body: {
         forceNew,
         forceSync,
-        name: pkg.name || basename(path),
-        description: pkg.description,
+        name: name,
+        description: description,
+        deploymentType,
         registryAuthToken: authToken,
         // Flatten the array to contain files to sync where each nested input
         // array has a group of files with the same sha but different path
         files: Array.prototype.concat.apply([], Array.from(this._files).map(([sha, { data, names }]) => {
-          return names.map((name) => {
+          return names.map((n) => {
             return {
               sha,
               size: data.length,
-              file: toRelative(name, this._path)
+              file: toRelative(n, this._path)
             };
           });
         })),
@@ -150,12 +216,12 @@ export default class Now extends EventEmitter {
       deployment.warnings.forEach((warning) => {
         if ('size_limit_exceeded' === warning.reason) {
           const { sha, limit } = warning;
-          const name = hashes.get(sha).names.pop();
+          const n = hashes.get(sha).names.pop();
           console.error('> \u001b[31mWarning!\u001b[39m Skipping file %s (size exceeded %s)',
-            name,
+            n,
             bytes(limit)
           );
-          hashes.get(sha).names.unshift(name); // move name (hack, if duplicate matches we report them in order)
+          hashes.get(sha).names.unshift(n); // move name (hack, if duplicate matches we report them in order)
           sizeExceeded++;
         } else if ('node_version_not_found' === warning.reason) {
           const { wanted, used } = warning;
@@ -174,7 +240,7 @@ export default class Now extends EventEmitter {
       }
     }

-    if (!quiet) {
+    if (!quiet && deployment.nodeVersion) {
       if (engines && engines.node) {
         if (missingVersion) {
           console.log(`> Using Node.js ${chalk.bold(deployment.nodeVersion)} (default)`);
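
Putting the `docker` branch together: a `Dockerfile` passes validation if it has at least one `RUN` or `CMD` and an `EXPOSE`, and it may carry `name` and `description` as `LABEL`s. A sketch of the parsing step as `create` uses it; the sample Dockerfile contents are invented for illustration:

    import { parse as parseDockerfile } from 'docker-file-parser';

    const commands = parseDockerfile([
      'FROM node:6',
      'LABEL name="my-app" description="demo app"',
      'RUN npm install',
      'EXPOSE 3000',
      'CMD ["npm", "start"]'
    ].join('\n'));

    // the same checks `create` performs before deploying
    console.log(commands.some((cmd) => 'RUN' === cmd.name || 'CMD' === cmd.name)); // true
    console.log(commands.some((cmd) => 'EXPOSE' === cmd.name)); // true

    // LABEL args arrive as a key/value map of raw strings,
    // which is why `create` JSON-parses each value
    const label = commands.find((cmd) => 'LABEL' === cmd.name);
    console.log(JSON.parse(label.args.name)); // 'my-app'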

lib/utils/prompt-options.js

@@ -0,0 +1,30 @@
+import chalk from 'chalk';
+
+export default function (opts) {
+  return new Promise((resolve, reject) => {
+    opts.forEach(([, text], i) => {
+      console.log(`${chalk.gray('>')} [${chalk.bold(i + 1)}] ${text}`);
+    });
+
+    const ondata = (v) => {
+      const s = v.toString();
+
+      if ('\u0003' === s) {
+        cleanup();
+        reject(new Error('Aborted'));
+        return;
+      }
+
+      const n = Number(s);
+      if (opts[n - 1]) {
+        cleanup();
+        resolve(opts[n - 1][0]);
+      }
+    };
+
+    const cleanup = () => {
+      process.stdin.setRawMode(false);
+      process.stdin.removeListener('data', ondata);
+    };
+
+    process.stdin.setRawMode(true);
+    process.stdin.resume();
+    process.stdin.on('data', ondata);
+  });
+}
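
`promptOptions` takes `[value, label]` pairs, prints a numbered menu, and resolves with the `value` of the pair whose number is pressed; Ctrl-C rejects with `Aborted`. A usage sketch mirroring the call site in `bin/now-deploy`:

    import promptOptions from './prompt-options';

    async function choose () {
      // prints "> [1] package.json" and "> [2] Dockerfile"
      const type = await promptOptions([
        ['npm', 'package.json'],
        ['docker', 'Dockerfile']
      ]);
      console.log(type); // 'npm' or 'docker'
    }

Note that `process.stdin.setRawMode` only exists when stdin is a TTY, which is why the caller guards the prompt with `isTTY`.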

package.json

@@ -57,6 +57,7 @@
     "chalk": "1.1.3",
     "copy-paste": "1.2.0",
     "cross-spawn-async": "2.2.4",
+    "docker-file-parser": "0.1.0",
     "domain-regex": "0.0.1",
     "email-prompt": "0.1.8",
     "email-validator": "1.0.4",
