@@ -9,9 +9,10 @@ import EventEmitter from 'events';
 import { basename, resolve as resolvePath } from 'path';
 import { homedir } from 'os';
 import { parse as parseIni } from 'ini';
-import { stat, readFile } from 'fs-promise';
+import { readFile } from 'fs-promise';
 import resumer from 'resumer';
 import splitArray from 'split-array';
+import { parse as parseDockerfile } from 'docker-file-parser';

 // how many concurrent HTTP/2 stream uploads
 const MAX_CONCURRENT = 10;
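
Note: the new `docker-file-parser` dependency exposes `parse`, which turns
Dockerfile source into an array of command objects. The LABEL handling added
below relies on `args` being a key/value object whose values keep their
surrounding quotes. A rough sketch of the expected shape (illustrative, not
the library's exact output):

    parseDockerfile('FROM node\nLABEL name="my-app"\nEXPOSE 3000');
    // => roughly:
    // [ { name: 'FROM', args: 'node' },
    //   { name: 'LABEL', args: { name: '"my-app"' } },  // value still JSON-quoted
    //   { name: 'EXPOSE', args: '3000' } ]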
@@ -30,47 +31,108 @@ export default class Now extends EventEmitter {
     this._onRetry = this._onRetry.bind(this);
   }

-  async create (path, { forceNew, forceSync, forwardNpm, quiet = false }) {
+  async create (path, {
+    quiet = false,
+    forceNew = false,
+    forceSync = false,
+    forwardNpm = false,
+    deploymentType = 'npm'
+  }) {
     this._path = path;

-    try {
-      await stat(path);
-    } catch (err) {
-      const e = new Error(`Could not read directory ${path}.`);
-      e.userError = true;
-      throw e;
-    }
-    let pkg;
-    try {
-      pkg = await readFile(resolvePath(path, 'package.json'));
-      pkg = JSON.parse(pkg);
-    } catch (err) {
-      const e = Error(`Failed to read JSON in "${path}/package.json"`);
-      e.userError = true;
-      throw e;
-    }
-
-    if (null == pkg.name || 'string' !== typeof pkg.name) {
-      const e = Error('Missing or invalid `name` in `package.json`.');
-      e.userError = true;
-      throw e;
-    }
-
-    if (!pkg.scripts || (!pkg.scripts.start && !pkg.scripts['now-start'])) {
-      const e = Error('Missing `start` (or `now-start`) script in `package.json`. ' +
-        'See: https://docs.npmjs.com/cli/start.');
-      e.userError = true;
-      throw e;
-    }
+    let pkg = {};
+    let name, description;
+
+    if ('npm' === deploymentType) {
+      try {
+        pkg = await readFile(resolvePath(path, 'package.json'));
+        pkg = JSON.parse(pkg);
+      } catch (err) {
+        const e = Error(`Failed to read JSON in "${path}/package.json"`);
+        e.userError = true;
+        throw e;
+      }
+
+      if (!pkg.scripts || (!pkg.scripts.start && !pkg.scripts['now-start'])) {
+        const e = Error('Missing `start` (or `now-start`) script in `package.json`. ' +
+          'See: https://docs.npmjs.com/cli/start.');
+        e.userError = true;
+        throw e;
+      }
+
+      if (null == pkg.name || 'string' !== typeof pkg.name) {
+        name = basename(path);
+        if (!quiet) console.log(`> No \`name\` in \`package.json\`, using ${chalk.bold(name)}`);
+      } else {
+        name = pkg.name;
+      }
+
+      description = pkg.description;
+    } else if ('docker' === deploymentType) {
+      let docker;
+      try {
+        const dockerfile = await readFile(resolvePath(path, 'Dockerfile'), 'utf8');
+        docker = parseDockerfile(dockerfile);
+      } catch (err) {
+        const e = Error(`Failed to parse "${path}/Dockerfile"`);
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.length) {
+        const e = Error('No commands found in `Dockerfile`');
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.some((cmd) => 'RUN' === cmd.name || 'CMD' === cmd.name)) {
+        const e = Error('No `RUN` or `CMD` found in `Dockerfile`. ' +
+          'See: https://docs.docker.com/engine/reference/builder/#/run');
+        e.userError = true;
+        throw e;
+      }
+
+      if (!docker.some((cmd) => 'EXPOSE' === cmd.name)) {
+        const e = Error('No `EXPOSE` found in `Dockerfile`. A port must be supplied. ' +
+          'See: https://docs.docker.com/engine/reference/builder/#/expose');
+        e.userError = true;
+        throw e;
+      }
+
+      const labels = {};
+      docker
+      .filter(cmd => 'LABEL' === cmd.name)
+      .forEach(({ args }) => {
+        for (let key in args) {
+          // unescape and convert into string
+          try {
+            labels[key] = JSON.parse(args[key]);
+          } catch (err) {
+            const e = Error(`Error parsing value for LABEL ${key} in \`Dockerfile\``);
+            e.userError = true;
+            throw e;
+          }
+        }
+      });
+
+      if (null == labels.name) {
+        name = basename(path);
+        if (!quiet) console.log(`> No \`name\` LABEL in \`Dockerfile\`, using ${chalk.bold(name)}`);
+      } else {
+        name = labels.name;
+      }
+
+      description = labels.description;
+    }

-    const nowProperties = pkg.now || {};
+    const nowProperties = pkg ? pkg.now || {} : {};

     forwardNpm = forwardNpm || nowProperties['forwardNpm'];

     // Read .npmrc
     let npmrc = {};
     let authToken;
-    if (forwardNpm) {
+    if ('npm' === deploymentType && forwardNpm) {
       try {
         npmrc = await readFile(resolvePath(path, '.npmrc'), 'utf8');
         npmrc = parseIni(npmrc);
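
Note: given the checks above, a minimal Dockerfile accepted by the new `docker`
deployment type would look something like this (names are illustrative):

    FROM node:6
    LABEL name="my-app" description="An example app"
    EXPOSE 3000
    CMD ["node", "server.js"]

LABEL values go through JSON.parse (the "unescape and convert into string"
step), so they should be double-quoted; a bare `LABEL name=my-app` would hit
the "Error parsing value for LABEL" user error, assuming the parser preserves
the original quoting.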
@@ -91,7 +153,10 @@ export default class Now extends EventEmitter {
     }

     if (this._debug) console.time('> [debug] Getting files');
-    const files = await getFiles(path, pkg, { debug: this._debug });
+    const files = await getFiles(path, pkg, {
+      deploymentType,
+      debug: this._debug
+    });
     if (this._debug) console.timeEnd('> [debug] Getting files');

     if (this._debug) console.time('> [debug] Computing hashes');
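
Note: `getFiles` now receives `deploymentType` so file selection can differ
between npm and Docker deployments. A hypothetical sketch of the branching this
enables (not the actual lib/get-files implementation):

    export default async function getFiles (path, pkg, { deploymentType = 'npm', debug = false } = {}) {
      if ('npm' === deploymentType) {
        // as before: honor `files` in package.json and npm-style ignores
      } else {
        // docker: walk the directory, making sure the Dockerfile itself is included
      }
    }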
@@ -109,17 +174,18 @@
       body: {
         forceNew,
         forceSync,
-        name: pkg.name || basename(path),
-        description: pkg.description,
+        name: name,
+        description: description,
+        deploymentType,
         registryAuthToken: authToken,
         // Flatten the array to contain files to sync where each nested input
         // array has a group of files with the same sha but different path
         files: Array.prototype.concat.apply([], Array.from(this._files).map(([sha, { data, names }]) => {
-          return names.map((name) => {
+          return names.map((n) => {
             return {
               sha,
               size: data.length,
-              file: toRelative(name, this._path)
+              file: toRelative(n, this._path)
             };
           });
         })),
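
Note: `this._files` maps each sha to its content plus every path that hashed to
it, and the concat.apply flattens those per-sha groups into one record per file.
With illustrative data:

    // this._files: Map { 'abc123' => { data: Buffer.from('abc123'), names: ['/app/a.js', '/app/b.js'] } }
    // the `files` field becomes:
    // [ { sha: 'abc123', size: 6, file: 'a.js' },
    //   { sha: 'abc123', size: 6, file: 'b.js' } ]

The callback parameter is renamed from `name` to `n` because `name` now refers
to the deployment name computed earlier in `create()`.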
@@ -150,12 +216,12 @@
       deployment.warnings.forEach((warning) => {
         if ('size_limit_exceeded' === warning.reason) {
           const { sha, limit } = warning;
-          const name = hashes.get(sha).names.pop();
+          const n = hashes.get(sha).names.pop();
           console.error('> \u001b[31mWarning!\u001b[39m Skipping file %s (size exceeded %s)',
-            name,
+            n,
             bytes(limit)
           );
-          hashes.get(sha).names.unshift(name); // move name (hack, if duplicate matches we report them in order)
+          hashes.get(sha).names.unshift(n); // move name (hack, if duplicate matches we report them in order)
           sizeExceeded++;
         } else if ('node_version_not_found' === warning.reason) {
           const { wanted, used } = warning;
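
Note: the same `name` -> `n` rename is applied here. The pop/unshift pair
rotates the reported path to the front of the list, so repeated warnings for
one sha cycle through its duplicate paths in order:

    const names = ['a.js', 'b.js'];
    const n = names.pop();  // 'b.js' gets reported
    names.unshift(n);       // names is now ['b.js', 'a.js']; the next pop() reports 'a.js'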
@@ -174,7 +240,7 @@
       }
     }

-    if (!quiet) {
+    if (!quiet && deployment.nodeVersion) {
       if (engines && engines.node) {
         if (missingVersion) {
           console.log(`> Using Node.js ${chalk.bold(deployment.nodeVersion)} (default)`);