Leo Lamprecht
7 years ago
21 changed files with 44 additions and 2493 deletions
@ -1,141 +0,0 @@ |
|||||
// Native
|
|
||||
const os = require('os') |
|
||||
|
|
||||
// Packages
|
|
||||
const { stringify: stringifyQuery } = require('querystring') |
|
||||
const chalk = require('chalk') |
|
||||
const fetch = require('node-fetch') |
|
||||
const { validate } = require('email-validator') |
|
||||
const readEmail = require('email-prompt') |
|
||||
const ora = require('ora') |
|
||||
|
|
||||
// Ours
|
|
||||
const pkg = require('../../../../util/pkg') |
|
||||
const ua = require('./ua') |
|
||||
const cfg = require('./cfg') |
|
||||
|
|
||||
// Ask the API to start email verification for `email`.
// Resolves with the parsed response body (contains `token` and
// `securityCode`); throws on any non-200 response.
async function getVerificationData(url, email) {
  // Token name identifies this CLI install (platform, arch, version, host)
  const tokenName = `Now CLI ${os.platform()}-${os.arch()} ${pkg.version} (${os.hostname()})`
  const payload = JSON.stringify({ email, tokenName })

  const res = await fetch(`${url}/now/registration`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(payload),
      'User-Agent': ua
    },
    body: payload
  })

  const body = await res.json()

  if (res.status !== 200) {
    throw new Error(
      `Verification error: ${res.status} – ${JSON.stringify(body)}`
    )
  }

  return body
}
|
||||
|
|
||||
// Poll the verification endpoint once; resolves with the session token
// (or undefined while the email has not been confirmed yet).
async function verify(url, email, verificationToken) {
  const query = {
    email,
    token: verificationToken
  }

  const endpoint = `${url}/now/registration/verify?${stringifyQuery(query)}`
  const res = await fetch(endpoint, {
    headers: { 'User-Agent': ua }
  })

  const body = await res.json()
  return body.token
}
|
||||
|
|
||||
// Resolve after `ms` milliseconds.
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms))
}
|
||||
|
|
||||
// Interactive login flow: prompt for an email address, trigger the
// verification email, poll until the link is clicked, then fetch the
// user profile. Resolves with `{ token, user, lastUpdate }`.
async function register(url, { retryEmail = false } = {}) {
  let email
  try {
    email = await readEmail({ invalid: retryEmail })
  } catch (err) {
    process.stdout.write('\n')
    throw err
  }

  process.stdout.write('\n')

  // Re-prompt (flagged as invalid) until the address passes validation
  if (!validate(email)) {
    return register(url, { retryEmail: true })
  }

  const { token, securityCode } = await getVerificationData(url, email)

  console.log(
    `> Please follow the link sent to ${chalk.bold(email)} to log in.`
  )

  if (securityCode) {
    console.log(
      `> Verify that the provided security code in the email matches ${chalk.cyan(
        chalk.bold(securityCode)
      )}.`
    )
  }

  process.stdout.write('\n')

  const spinner = ora({
    text: 'Waiting for confirmation...'
  }).start()

  // Poll every 2.5s until the verification endpoint hands out a token;
  // transient errors are ignored on purpose (best-effort polling).
  let final
  /* eslint-disable no-await-in-loop */
  do {
    await sleep(2500)
    try {
      final = await verify(url, email, token)
    } catch (err) {}
  } while (!final)
  /* eslint-enable no-await-in-loop */

  let user
  try {
    user = (await (await fetch(`${url}/www/user`, {
      headers: {
        Authorization: `Bearer ${final}`
      }
    })).json()).user
  } catch (err) {
    spinner.stop()
    throw new Error(`Couldn't retrieve user details ${err.message}`)
  }

  spinner.text = 'Confirmed email address!'
  spinner.stopAndPersist('✔')

  process.stdout.write('\n')

  return {
    token: final,
    user: {
      uid: user.uid,
      username: user.username,
      email: user.email
    },
    lastUpdate: Date.now()
  }
}
|
||||
|
|
||||
module.exports = async function(url) { |
|
||||
const loginData = await register(url) |
|
||||
await cfg.merge(loginData) |
|
||||
await cfg.remove('currentTeam') // Make sure to logout the team too
|
|
||||
await cfg.remove('email') // Remove old schema from previus versions
|
|
||||
return loginData.token |
|
||||
} |
|
@ -1 +0,0 @@ |
|||||
module.exports = require('./legacy/deploy') |
|
@ -1,83 +0,0 @@ |
|||||
// Native
|
|
||||
const { parse } = require('url') |
|
||||
const http = require('http') |
|
||||
const https = require('https') |
|
||||
|
|
||||
// Packages
|
|
||||
const fetch = require('node-fetch') |
|
||||
|
|
||||
/** |
|
||||
* Returns a `fetch` version with a similar |
|
||||
* API to the browser's configured with a |
|
||||
* HTTP2 agent. |
|
||||
* |
|
||||
* It encodes `body` automatically as JSON. |
|
||||
* |
|
||||
* @param {String} host |
|
||||
* @return {Function} fetch |
|
||||
*/ |
|
||||
|
|
||||
module.exports = class Agent { |
|
||||
constructor(url, { tls = true, debug } = {}) { |
|
||||
this._url = url |
|
||||
const parsed = parse(url) |
|
||||
this._protocol = parsed.protocol |
|
||||
this._debug = debug |
|
||||
if (tls) { |
|
||||
this._initAgent() |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
_initAgent() { |
|
||||
const module = this._protocol === 'https:' ? https : http |
|
||||
this._agent = new module.Agent({ |
|
||||
keepAlive: true, |
|
||||
keepAliveMsecs: 10000, |
|
||||
maxSockets: 8 |
|
||||
}).on('error', err => this._onError(err, this._agent)) |
|
||||
} |
|
||||
|
|
||||
_onError(err, agent) { |
|
||||
if (this._debug) { |
|
||||
console.log(`> [debug] agent connection error ${err}\n${err.stack}`) |
|
||||
} |
|
||||
if (this._agent === agent) { |
|
||||
this._agent = null |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
fetch(path, opts = {}) { |
|
||||
if (!this._agent) { |
|
||||
if (this._debug) { |
|
||||
console.log('> [debug] re-initializing agent') |
|
||||
} |
|
||||
this._initAgent() |
|
||||
} |
|
||||
|
|
||||
const { body } = opts |
|
||||
if (this._agent) { |
|
||||
opts.agent = this._agent |
|
||||
} |
|
||||
|
|
||||
if (body && typeof body === 'object' && typeof body.pipe !== 'function') { |
|
||||
opts.headers['Content-Type'] = 'application/json' |
|
||||
opts.body = JSON.stringify(body) |
|
||||
} |
|
||||
|
|
||||
if (opts.body && typeof body.pipe !== 'function') { |
|
||||
opts.headers['Content-Length'] = Buffer.byteLength(opts.body) |
|
||||
} |
|
||||
|
|
||||
return fetch(this._url + path, opts) |
|
||||
} |
|
||||
|
|
||||
close() { |
|
||||
if (this._debug) { |
|
||||
console.log('> [debug] closing agent') |
|
||||
} |
|
||||
|
|
||||
if (this._agent) { |
|
||||
this._agent.destroy() |
|
||||
} |
|
||||
} |
|
||||
} |
|
@ -1,138 +0,0 @@ |
|||||
// Native
|
|
||||
const EventEmitter = require('events') |
|
||||
|
|
||||
// Packages
|
|
||||
const io = require('socket.io-client') |
|
||||
const chalk = require('chalk') |
|
||||
|
|
||||
const { compare, deserialize } = require('./logs') |
|
||||
|
|
||||
module.exports = class Logger extends EventEmitter { |
|
||||
constructor(host, token, { debug = false, quiet = false } = {}) { |
|
||||
super() |
|
||||
this.host = host |
|
||||
this.token = token |
|
||||
this.debug = debug |
|
||||
this.quiet = quiet |
|
||||
|
|
||||
// ReadyState
|
|
||||
this.building = false |
|
||||
|
|
||||
this.socket = io(`https://io.now.sh/states?host=${host}&v=2`) |
|
||||
this.socket.once('error', this.onSocketError.bind(this)) |
|
||||
this.socket.on('auth', this.onAuth.bind(this)) |
|
||||
this.socket.on('state', this.onState.bind(this)) |
|
||||
this.socket.on('logs', this.onLog.bind(this)) |
|
||||
this.socket.on('backend', this.onComplete.bind(this)) |
|
||||
|
|
||||
// Log buffer
|
|
||||
this.buf = [] |
|
||||
this.printed = new Set() |
|
||||
} |
|
||||
|
|
||||
onAuth(callback) { |
|
||||
if (this.debug) { |
|
||||
console.log('> [debug] authenticate') |
|
||||
} |
|
||||
callback(this.token) |
|
||||
} |
|
||||
|
|
||||
onState(state) { |
|
||||
// Console.log(state)
|
|
||||
if (!state.id) { |
|
||||
console.error('> Deployment not found') |
|
||||
this.emit('error') |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
if (state.error) { |
|
||||
this.emit('error', state) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
if (state.backend) { |
|
||||
this.onComplete() |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
if (state.logs) { |
|
||||
state.logs.forEach(this.onLog, this) |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
onLog(log) { |
|
||||
if (!this.building) { |
|
||||
if (!this.quiet) { |
|
||||
console.log('> Building') |
|
||||
} |
|
||||
this.building = true |
|
||||
} |
|
||||
|
|
||||
if (this.quiet) { |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
log = deserialize(log) |
|
||||
|
|
||||
const timer = setTimeout(() => { |
|
||||
this.buf.sort((a, b) => compare(a.log, b.log)) |
|
||||
const idx = this.buf.findIndex(b => b.log.id === log.id) + 1 |
|
||||
for (const b of this.buf.slice(0, idx)) { |
|
||||
clearTimeout(b.timer) |
|
||||
this.printLog(b.log) |
|
||||
} |
|
||||
this.buf = this.buf.slice(idx) |
|
||||
}, 500) |
|
||||
|
|
||||
this.buf.push({ log, timer }) |
|
||||
} |
|
||||
|
|
||||
onComplete() { |
|
||||
this.socket.disconnect() |
|
||||
|
|
||||
if (this.building) { |
|
||||
this.building = false |
|
||||
} |
|
||||
|
|
||||
this.buf.sort((a, b) => compare(a.log, b.log)) |
|
||||
|
|
||||
// Flush all buffer
|
|
||||
for (const b of this.buf) { |
|
||||
clearTimeout(b.timer) |
|
||||
this.printLog(b.log) |
|
||||
} |
|
||||
this.buf = [] |
|
||||
|
|
||||
this.emit('close') |
|
||||
} |
|
||||
|
|
||||
onSocketError(err) { |
|
||||
if (this.debug) { |
|
||||
console.log(`> [debug] Socket error ${err}\n${err.stack}`) |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
printLog(log) { |
|
||||
if (this.printed.has(log.id)) return |
|
||||
|
|
||||
this.printed.add(log.id) |
|
||||
|
|
||||
const data = log.object ? JSON.stringify(log.object) : log.text |
|
||||
|
|
||||
if (log.type === 'command') { |
|
||||
console.log(`${chalk.gray('>')} ▲ ${data}`) |
|
||||
} else if (log.type === 'stderr') { |
|
||||
data.split('\n').forEach(v => { |
|
||||
if (v.length > 0) { |
|
||||
console.error(chalk.gray(`> ${v}`)) |
|
||||
} |
|
||||
}) |
|
||||
} else if (log.type === 'stdout') { |
|
||||
data.split('\n').forEach(v => { |
|
||||
if (v.length > 0) { |
|
||||
console.log(`${chalk.gray('>')} ${v}`) |
|
||||
} |
|
||||
}) |
|
||||
} |
|
||||
} |
|
||||
} |
|
@ -1,91 +0,0 @@ |
|||||
// Packages
|
|
||||
const ms = require('ms') |
|
||||
const chalk = require('chalk') |
|
||||
|
|
||||
const error = require('../../../util/output/error') |
|
||||
const info = require('../../../util/output/info') |
|
||||
|
|
||||
// Print a user-facing message for a CLI/API error, keyed off the
// error's HTTP status / flags. Never throws.
function handleError(err, { debug = false } = {}) {
  // Coerce Strings to Error instances
  if (typeof err === 'string') {
    err = new Error(err)
  }

  if (debug) {
    console.log(`> [debug] handling error: ${err.stack}`)
  }

  if (err.status === 403) {
    console.log(
      error(
        'Authentication error. Run `now -L` or `now --login` to log-in again.'
      )
    )
  } else if (err.status === 429) {
    // Rate-limited: the message depends on how long to back off
    if (err.retryAfter === 'never') {
      console.log(error(err.message))
    } else if (err.retryAfter === null) {
      console.log(error('Rate limit exceeded error. Please try later.'))
    } else {
      console.log(
        error(
          'Rate limit exceeded error. Try again in ' +
            ms(err.retryAfter * 1000, { long: true }) +
            ', or upgrade your account by running ' +
            `${chalk.gray('`')}${chalk.cyan('now upgrade')}${chalk.gray('`')}`
        )
      )
    }
  } else if (err.userError) {
    console.log(error(err.message))
  } else if (err.status === 500) {
    console.log(error('Unexpected server error. Please retry.'))
  } else if (err.code === 'USER_ABORT') {
    console.log(info('Aborted'))
  } else {
    console.log(
      error(`Unexpected error. Please try again later. (${err.message})`)
    )
  }
}
|
||||
|
|
||||
// Build an Error from a failed fetch Response. 4xx responses are
// flagged `userError` and carry the API's message; 429 additionally
// gets a numeric `retryAfter` (seconds) when the header is present.
async function responseError(res) {
  let message
  let userError

  if (res.status >= 400 && res.status < 500) {
    let body
    try {
      body = await res.json()
    } catch (err) {
      body = {}
    }

    // Some APIs wrongly return `err` instead of `error`
    message = (body.error || body.err || {}).message
    userError = true
  } else {
    userError = false
  }

  const err = new Error(message || 'Response error')
  err.status = res.status
  err.userError = userError

  if (res.status === 429) {
    const retryAfter = res.headers.get('Retry-After')
    if (retryAfter) {
      err.retryAfter = parseInt(retryAfter, 10)
    }
  }

  return err
}
|
||||
|
|
||||
// Public surface of the error-handling module: `handleError` prints a
// user-facing message, `responseError` builds an Error from a fetch
// Response, `error` formats an error string (re-exported helper).
module.exports = {
  handleError,
  responseError,
  error
}
|
@ -1,385 +0,0 @@ |
|||||
// Native
|
|
||||
const { resolve } = require('path') |
|
||||
|
|
||||
// Packages
|
|
||||
const flatten = require('arr-flatten') |
|
||||
const unique = require('array-unique') |
|
||||
const ignore = require('ignore') |
|
||||
const _glob = require('glob') |
|
||||
const { stat, readdir, readFile } = require('fs-extra') |
|
||||
|
|
||||
// Ours
|
|
||||
const IGNORED = require('./ignored') |
|
||||
|
|
||||
// Promisified wrapper around the callback-style `glob` package.
const glob = async function(pattern, options) {
  return new Promise((resolve, reject) => {
    _glob(pattern, options, (error, files) =>
      error ? reject(error) : resolve(files)
    )
  })
}
|
||||
|
|
||||
/** |
|
||||
* Remove leading `./` from the beginning of ignores |
|
||||
* because our parser doesn't like them :| |
|
||||
*/ |
|
||||
|
|
||||
// Strip a leading `./` from every line (our ignore parser rejects them).
const clearRelative = function(str) {
  return str.replace(/(\n|^)\.\//g, '$1')
}
|
||||
|
|
||||
/** |
|
||||
* Returns the contents of a file if it exists. |
|
||||
* |
|
||||
* @return {String} results or `''` |
|
||||
*/ |
|
||||
|
|
||||
// Read a file as UTF-8, falling back to `default_` when it cannot be
// read (missing, unreadable, …).
const maybeRead = async function(path, default_ = '') {
  try {
    return await readFile(path, 'utf8')
  } catch (err) {
    return default_
  }
}
|
||||
|
|
||||
/** |
|
||||
* Transform relative paths into absolutes, |
|
||||
* and maintains absolutes as such. |
|
||||
* |
|
||||
* @param {String} maybe relative path |
|
||||
* @param {String} parent full path |
|
||||
*/ |
|
||||
|
|
||||
// Resolve `path` against `parent`, leaving already-absolute paths as-is.
const asAbsolute = function(path, parent) {
  return path[0] === '/' ? path : resolve(parent, path)
}
|
||||
|
|
||||
/** |
|
||||
* Returns a list of files in the given |
|
||||
* directory that are subject to be |
|
||||
* synchronized for static deployments. |
|
||||
* |
|
||||
* @param {String} full path to directory |
|
||||
* @param {Object} options: |
|
||||
* - `limit` {Number|null} byte limit |
|
||||
* - `debug` {Boolean} warn upon ignore |
|
||||
* @return {Array} comprehensive list of paths to sync |
|
||||
*/ |
|
||||
|
|
||||
/**
 * List the files under `path` to sync for a static deployment.
 * Honors `nowConfig.files` as a whitelist and `.gitignore` (plus the
 * built-in IGNORED set) as the ignore list.
 *
 * @param {String} path full path to directory
 * @param {Object} nowConfig parsed now.json (may supply `files`)
 * @param {Object} options `limit` byte limit, `hasNowJson`, `debug`
 * @return {Array} comprehensive list of paths to sync
 */
async function staticFiles(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files
  const gitIgnore = await maybeRead(resolve(path, '.gitignore'))

  const filter = ignore()
    .add(IGNORED + '\n' + clearRelative(gitIgnore))
    .createFilter()

  const prefixLength = path.length + 1

  // Accept the root itself, otherwise run each path (relative to the
  // root) through the ignore filter.
  const accepts = file => {
    const relativePath = file.substr(prefixLength)

    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|
||||
|
|
||||
/** |
|
||||
* Returns a list of files in the given |
|
||||
* directory that are subject to be |
|
||||
* synchronized for npm. |
|
||||
* |
|
||||
* @param {String} full path to directory |
|
||||
* @param {String} contents of `package.json` to avoid lookup |
|
||||
* @param {Object} options: |
|
||||
* - `limit` {Number|null} byte limit |
|
||||
* - `debug` {Boolean} warn upon ignore |
|
||||
* @return {Array} comprehensive list of paths to sync |
|
||||
*/ |
|
||||
|
|
||||
/**
 * List the files under `path` to sync for an npm deployment.
 * Whitelist precedence: nowConfig.files > pkg.files > pkg.now.files.
 * Ignores come from `.npmignore` (or `.gitignore` when absent) plus
 * the built-in IGNORED set — unless the whitelist overrides them.
 *
 * @param {String} path full path to directory
 * @param {Object} pkg parsed `package.json` (avoids a lookup)
 * @param {Object} nowConfig parsed now.json
 * @param {Object} options `limit` byte limit, `hasNowJson`, `debug`
 * @return {Array} comprehensive list of paths to sync
 */
async function npm(
  path,
  pkg = {},
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files || pkg.files || (pkg.now && pkg.now.files)

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files; `.npmignore` wins over
  // `.gitignore` when both exist
  const npmIgnore = await maybeRead(resolve(path, '.npmignore'), null)
  const gitIgnore =
    npmIgnore === null ? await maybeRead(resolve(path, '.gitignore')) : null

  const filter = ignore()
    .add(
      IGNORED + '\n' + clearRelative(npmIgnore === null ? gitIgnore : npmIgnore)
    )
    .createFilter()

  const prefixLength = path.length + 1

  // The package.json `files` whitelist still
  // honors npmignores: https://docs.npmjs.com/files/package.json#files
  // but we don't ignore if the user is explicitly listing files
  // under the now namespace, or using files in combination with gitignore
  const overrideIgnores =
    (pkg.now && pkg.now.files) ||
    nowConfig.files ||
    (gitIgnore !== null && pkg.files)

  const accepts = overrideIgnores
    ? () => true
    : file => {
        const relativePath = file.substr(prefixLength)

        if (relativePath === '') {
          return true
        }

        const accepted = filter(relativePath)
        if (!accepted && debug) {
          console.log('> [debug] ignoring "%s"', file)
        }
        return accepted
      }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('package.json', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|
||||
|
|
||||
/** |
|
||||
* Returns a list of files in the given |
|
||||
* directory that are subject to be |
|
||||
* sent to docker as build context. |
|
||||
* |
|
||||
* @param {String} full path to directory |
|
||||
* @param {String} contents of `Dockerfile` |
|
||||
* @param {Object} options: |
|
||||
* - `limit` {Number|null} byte limit |
|
||||
* - `debug` {Boolean} warn upon ignore |
|
||||
* @return {Array} comprehensive list of paths to sync |
|
||||
*/ |
|
||||
|
|
||||
/**
 * List the files under `path` to send to docker as build context.
 * Honors `nowConfig.files` as a whitelist and `.dockerignore`
 * (falling back to `.gitignore`) plus the built-in IGNORED set.
 *
 * @param {String} path full path to directory
 * @param {Object} nowConfig parsed now.json
 * @param {Object} options `limit` byte limit, `hasNowJson`, `debug`
 * @return {Array} comprehensive list of paths to sync
 */
async function docker(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // Base search path
  // the now.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = search_.map(file => asAbsolute(file, path))

  // Compile list of ignored patterns and files
  const dockerIgnore = await maybeRead(resolve(path, '.dockerignore'), null)

  const filter = ignore()
    .add(
      IGNORED +
        '\n' +
        clearRelative(
          dockerIgnore === null
            ? await maybeRead(resolve(path, '.gitignore'))
            : dockerIgnore
        )
    )
    .createFilter()

  const prefixLength = path.length + 1

  const accepts = function(file) {
    const relativePath = file.substr(prefixLength)

    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, { accepts, limit, debug })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('Dockerfile', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|
||||
|
|
||||
/** |
|
||||
* Explodes directories into a full list of files. |
|
||||
* Eg: |
|
||||
* in: ['/a.js', '/b'] |
|
||||
* out: ['/a.js', '/b/c.js', '/b/d.js'] |
|
||||
* |
|
||||
* @param {Array} of {String}s representing paths |
|
||||
* @param {Array} of ignored {String}s. |
|
||||
* @param {Object} options: |
|
||||
* - `limit` {Number|null} byte limit |
|
||||
* - `debug` {Boolean} warn upon ignore |
|
||||
* @return {Array} of {String}s of full paths |
|
||||
*/ |
|
||||
|
|
||||
/**
 * Explodes directories into a full list of files.
 * Eg:
 *   in:  ['/a.js', '/b']
 *   out: ['/a.js', '/b/c.js', '/b/d.js']
 *
 * @param {Array} paths of {String}s representing paths
 * @param {Object} options `accepts` predicate, `debug` flag
 * @return {Array} of {String}s of full paths
 */
async function explode(paths, { accepts, debug }) {
  const list = async file => {
    let path = file
    let s

    if (!accepts(file)) {
      return null
    }

    try {
      s = await stat(path)
    } catch (e) {
      // In case the file comes from `files`
      // and it wasn't specified with `.js` by the user
      path = file + '.js'

      try {
        s = await stat(path)
      } catch (e2) {
        if (debug) {
          console.log('> [debug] ignoring invalid file "%s"', file)
        }
        return null
      }
    }

    if (s.isDirectory()) {
      // Recurse into directories
      const all = await readdir(file)
      /* eslint-disable no-use-before-define */
      return many(all.map(subdir => asAbsolute(subdir, file)))
      /* eslint-enable no-use-before-define */
    } else if (!s.isFile()) {
      if (debug) {
        console.log('> [debug] ignoring special file "%s"', file)
      }
      return null
    }

    return path
  }

  const many = all => Promise.all(all.map(file => list(file)))
  return flatten(await many(paths)).filter(v => v !== null)
}
|
||||
|
|
||||
// File-listing strategies, one per deployment type.
module.exports = {
  npm,
  docker,
  staticFiles
}
|
@ -1,221 +0,0 @@ |
|||||
// Native
|
|
||||
const path = require('path') |
|
||||
const url = require('url') |
|
||||
const childProcess = require('child_process') |
|
||||
|
|
||||
// Packages
|
|
||||
const fs = require('fs-extra') |
|
||||
const download = require('download') |
|
||||
const tmp = require('tmp-promise') |
|
||||
const isURL = require('is-url') |
|
||||
|
|
||||
// Clone `parts.main` (at `parts.ref`, defaulting per platform) into
// `tmpDir.path`. Resolves with git's stdout; rejects if git fails.
// FIX: the options parameter now defaults to `{}` — previously calling
// `cloneRepo(parts, tmpDir)` threw a TypeError destructuring undefined.
const cloneRepo = (parts, tmpDir, { ssh } = {}) =>
  new Promise((resolve, reject) => {
    let host

    switch (parts.type) {
      case 'GitLab':
        host = `gitlab.com`
        break
      case 'Bitbucket':
        host = `bitbucket.org`
        break
      default:
        host = `github.com`
    }

    const url = ssh
      ? `git@${host}:${parts.main}`
      : `https://${host}/${parts.main}`

    const ref = parts.ref || (parts.type === 'Bitbucket' ? 'default' : 'master')
    const cmd = `git clone ${url} --single-branch ${ref}`

    childProcess.exec(cmd, { cwd: tmpDir.path }, (err, stdout) => {
      if (err) {
        // FIX: return after rejecting — resolve() was previously
        // also invoked on the error path.
        reject(err)
        return
      }

      resolve(stdout)
    })
  })
|
||||
|
|
||||
// Rename the single directory git/download produced inside the temp
// dir to `<owner>-<repo>` and point `tmpDir.path` at it.
const renameRepoDir = async (pathParts, tmpDir) => {
  const tmpContents = await fs.readdir(tmpDir.path)

  const oldTemp = path.join(tmpDir.path, tmpContents[0])
  const newTemp = path.join(tmpDir.path, pathParts.main.replace('/', '-'))

  await fs.rename(oldTemp, newTemp)
  tmpDir.path = newTemp

  return tmpDir
}
|
||||
|
|
||||
// Map a lowercase host prefix to its branded platform name.
const capitalizePlatform = name =>
  ({
    github: 'GitHub',
    gitlab: 'GitLab',
    bitbucket: 'Bitbucket'
  }[name])
|
||||
|
|
||||
// Break a repository URL into `{ main, ref, type }` where `main` is
// `owner/repo`, `ref` is a branch/tag/short-SHA (empty for master),
// and `type` is the branded platform name.
const splittedURL = fullURL => {
  const parsedURL = url.parse(fullURL)
  const pathParts = parsedURL.path.split('/')

  pathParts.shift()

  // Set path to repo...
  const main = pathParts[0] + '/' + pathParts[1]

  // ...and then remove it from the parts
  pathParts.splice(0, 2)

  // Assign Git reference
  let ref = pathParts.length >= 2 ? pathParts[1] : ''

  // First make sure we know the ref type
  if (pathParts[0]) {
    // Then shorten the SHA of the commit
    if (pathParts[0] === 'commit' || pathParts[0] === 'commits') {
      ref = ref.substring(0, 7)
    }
  }

  // We're deploying master by default,
  // so there's no need to indicate it explicitly
  if (ref === 'master') {
    ref = ''
  }

  return {
    main,
    ref,
    type: capitalizePlatform(parsedURL.host.split('.')[0])
  }
}
|
||||
|
|
||||
// Normalize a repo spec (full URL or `owner/repo[#ref]` shorthand)
// into `{ main, ref, type }`. Shorthand always targets GitHub.
const gitPathParts = main => {
  let ref = ''

  if (isURL(main)) {
    return splittedURL(main)
  }

  // `owner/repo#branch` shorthand
  if (main.split('/')[1].includes('#')) {
    const parts = main.split('#')
    ref = parts[1]
    main = parts[0]
  }

  return {
    main,
    ref,
    type: capitalizePlatform('github')
  }
}
|
||||
|
|
||||
// Fetch a repository into a temp directory: try `git clone` (https,
// then ssh); if git is unavailable fall back to downloading a platform
// archive. Resolves with the tmpDir handle, or `false` on failure.
const downloadRepo = async repoPath => {
  const pathParts = gitPathParts(repoPath)

  const tmpDir = await tmp.dir({
    // We'll remove it manually once deployment is done
    keep: true,
    // Recursively remove directory when calling respective method
    unsafeCleanup: true
  })

  let gitInstalled = true

  try {
    await cloneRepo(pathParts, tmpDir)
  } catch (err) {
    try {
      await cloneRepo(pathParts, tmpDir, { ssh: true })
    } catch (err) {
      gitInstalled = false
    }
  }

  if (gitInstalled) {
    return renameRepoDir(pathParts, tmpDir)
  }

  // No git available — download an archive instead
  let url

  switch (pathParts.type) {
    case 'GitLab': {
      const ref = pathParts.ref ? `?ref=${pathParts.ref}` : ''
      url = `https://gitlab.com/${pathParts.main}/repository/archive.tar` + ref
      break
    }
    case 'Bitbucket':
      url = `https://bitbucket.org/${pathParts.main}/get/${pathParts.ref ||
        'default'}.zip`
      break
    default:
      url = `https://api.github.com/repos/${pathParts.main}/tarball/${pathParts.ref}`
  }

  try {
    await download(url, tmpDir.path, {
      extract: true
    })
  } catch (err) {
    tmpDir.cleanup()
    return false
  }

  return renameRepoDir(pathParts, tmpDir)
}
|
||||
|
|
||||
// Decide whether `path` names a remote repository. Full URLs must be
// on a supported host and contain owner + repo; otherwise throws an
// INVALID_URL user error. Non-URLs match an `owner/repo` shape.
const isRepoPath = path => {
  if (!path) {
    return false
  }

  const allowedHosts = ['github.com', 'gitlab.com', 'bitbucket.org']

  if (isURL(path)) {
    const urlParts = url.parse(path)
    const slashSplitted = urlParts.path.split('/').filter(n => n)
    const notBare = slashSplitted.length >= 2

    if (allowedHosts.includes(urlParts.host) && notBare) {
      return true
    }

    const err = new Error(`Host "${urlParts.host}" is unsupported.`)
    err.code = 'INVALID_URL'
    err.userError = true
    throw err
  }

  return /[^\s\\]\/[^\s\\]/g.test(path)
}
|
||||
|
|
||||
// Best-effort repository download; returns the tmpDir handle or
// `false` when the download failed.
const fromGit = async (path, debug) => {
  let tmpDir = false

  try {
    tmpDir = await downloadRepo(path)
  } catch (err) {
    if (debug) {
      console.log(`Could not download "${path}" repo from GitHub`)
    }
  }

  return tmpDir
}
|
||||
|
|
||||
// Public surface: spec parsing (`gitPathParts`), detection
// (`isRepoPath`) and download (`fromGit`).
module.exports = {
  gitPathParts,
  isRepoPath,
  fromGit
}
|
@ -1,44 +0,0 @@ |
|||||
// Native
|
|
||||
const { createHash } = require('crypto') |
|
||||
|
|
||||
// Packages
|
|
||||
const { readFile } = require('fs-extra') |
|
||||
|
|
||||
/** |
|
||||
* Computes hashes for the contents of each file given. |
|
||||
* |
|
||||
* @param {Array} of {String} full paths |
|
||||
* @return {Map} |
|
||||
*/ |
|
||||
|
|
||||
/**
 * Computes hashes for the contents of each file given, grouping
 * identical files (same digest) under one map entry.
 *
 * @param {Array} files of {String} full paths
 * @return {Map} digest -> { names: [paths], data: Buffer }
 */
async function hashes(files) {
  const map = new Map()

  await Promise.all(
    files.map(async name => {
      const data = await readFile(name)

      const h = hash(data)
      const entry = map.get(h)
      if (entry) {
        entry.names.push(name)
      } else {
        // FIX: reuse the digest computed above instead of hashing the
        // same buffer a second time.
        map.set(h, { names: [name], data })
      }
    })
  )
  return map
}
|
||||
|
|
||||
/**
 * Computes a hash for the given buf.
 *
 * @param {Buffer} file data
 * @return {String} hex digest
 */

function hash(buf) {
  const sha1 = createHash('sha1')
  sha1.update(buf)
  return sha1.digest('hex')
}
|
||||
|
|
||||
// Single export: the file-content hashing helper above
module.exports = hashes
|
@ -1,17 +0,0 @@ |
|||||
// Base `.gitignore` to which we add entries
|
|
||||
// supplied by the user
|
|
||||
module.exports = `.hg
|
|
||||
.git |
|
||||
.gitmodules |
|
||||
.svn |
|
||||
.npmignore |
|
||||
.dockerignore |
|
||||
.gitignore |
|
||||
.*.swp |
|
||||
.DS_Store |
|
||||
.wafpicke-* |
|
||||
.lock-wscript |
|
||||
npm-debug.log |
|
||||
config.gypi |
|
||||
node_modules |
|
||||
CVS` |
|
@ -1,14 +0,0 @@ |
|||||
exports.compare = function(a, b) { |
|
||||
return ( |
|
||||
a.serial.localeCompare(b.serial) || |
|
||||
// For the case serials are a same value on old logs
|
|
||||
a.created.getTime() - b.created.getTime() |
|
||||
) |
|
||||
} |
|
||||
|
|
||||
exports.deserialize = function(log) { |
|
||||
return Object.assign({}, log, { |
|
||||
date: new Date(log.date), |
|
||||
created: new Date(log.created) |
|
||||
}) |
|
||||
} |
|
File diff suppressed because it is too large
@ -1,57 +0,0 @@ |
|||||
const ms = require('ms') |
|
||||
|
|
||||
const Now = require('./now') |
|
||||
|
|
||||
// Extracts the current plan from an API response. Accounts without a
// subscription, or without a main plan among the subscription items, are
// reported as the free `oss` plan. When the subscription is set to cancel,
// `until` carries a human-readable time remaining (via `ms`).
async function parsePlan(json) {
  const { subscription } = json

  if (!subscription) {
    return { id: 'oss', name: undefined, until: undefined }
  }

  const mainPlan = subscription.items.data.find(
    d => d.plan.metadata.is_main_plan === '1'
  )

  if (!mainPlan) {
    return { id: 'oss', name: undefined, until: undefined }
  }

  let until
  if (subscription.cancel_at_period_end) {
    until = ms(
      new Date(subscription.current_period_end * 1000) - new Date(),
      { long: true }
    )
  }

  return { id: mainPlan.plan.id, name: mainPlan.plan.name, until }
}
|
||||
|
|
||||
module.exports = class Plans extends Now { |
|
||||
async getCurrent() { |
|
||||
const res = await this._fetch('/plan') |
|
||||
const json = await res.json() |
|
||||
return parsePlan(json) |
|
||||
} |
|
||||
|
|
||||
async set(plan) { |
|
||||
const res = await this._fetch('/plan', { |
|
||||
method: 'PUT', |
|
||||
body: { plan } |
|
||||
}) |
|
||||
|
|
||||
const json = await res.json() |
|
||||
|
|
||||
if (res.ok) { |
|
||||
return parsePlan(json) |
|
||||
} |
|
||||
|
|
||||
const err = new Error(json.error.message) |
|
||||
err.code = json.error.code |
|
||||
throw err |
|
||||
} |
|
||||
} |
|
@ -1,195 +0,0 @@ |
|||||
// Native
|
|
||||
const { basename, resolve: resolvePath } = require('path') |
|
||||
|
|
||||
// Packages
|
|
||||
const chalk = require('chalk') |
|
||||
const { readFile } = require('fs-extra') |
|
||||
const { parse: parseDockerfile } = require('docker-file-parser') |
|
||||
const determineType = require('deployment-type') |
|
||||
|
|
||||
// Single export: the metadata reader defined below (function declarations hoist)
module.exports = readMetaData
|
||||
|
|
||||
/**
 * Reads deployment metadata for the project at `path`.
 *
 * Configuration is merged from `package.json` (its `now` field), `now.json`
 * and `Dockerfile`; the deployment type, name and session affinity are
 * resolved from explicit options first, then configuration, then detection.
 *
 * @param {String} path - project root directory
 * @param {Object} options
 * @param {String} [options.deploymentType] - 'npm' | 'docker' | 'static'; auto-detected when absent
 * @param {String} [options.deploymentName] - explicit deployment name
 * @param {String} [options.sessionAffinity] - explicit session affinity value
 * @param {Boolean} [options.quiet=false] - suppress "using <name>" log output
 * @param {Boolean} [options.strict=true] - for docker, require a non-empty `Dockerfile`
 * @return {Object} { name, description, type, pkg, nowConfig, hasNowJson,
 *                    deploymentType (legacy alias of type), sessionAffinity }
 * @throws {Error} with `userError = true` on conflicting or ambiguous config;
 *                 {TypeError} on an unsupported deployment type
 */
async function readMetaData(
  path,
  {
    deploymentType,
    deploymentName,
    sessionAffinity,
    quiet = false,
    strict = true
  }
) {
  let description
  let type = deploymentType
  let name = deploymentName
  let affinity = sessionAffinity

  const pkg = await readJSON(path, 'package.json')
  let nowConfig = await readJSON(path, 'now.json')
  const dockerfile = await readDockerfile(path)

  // Capture whether `now.json` itself existed BEFORE `pkg.now` is merged in below
  const hasNowJson = Boolean(nowConfig)

  if (pkg && pkg.now) {
    // If the project has both a `now.json` and `now` Object in the `package.json`
    // file, then fail hard and let the user know that they need to pick one or the
    // other
    if (nowConfig) {
      const err = new Error(
        'You have a `now` configuration field inside `package.json` ' +
          'but configuration is also present in `now.json`! ' +
          "Please ensure there's a single source of configuration by removing one."
      )
      err.userError = true
      throw err
    } else {
      nowConfig = pkg.now
    }
  }

  // We can remove this once the prompt for choosing `--npm` or `--docker` is gone
  // NOTE(review): by this point `nowConfig === pkg.now` whenever `pkg.now` exists
  // (the conflicting case threw above), so this block is redundant with the next one
  if (pkg && pkg.now && pkg.now.type) {
    type = nowConfig.type
  }

  // The same goes for this
  if (nowConfig && nowConfig.type) {
    type = nowConfig.type
  }

  if (!type) {
    type = await determineType(path)

    // Both `package.json` and `Dockerfile` exist! Prompt the user to pick one.
    // We can remove this soon (details are internal) - also read the comment paragraph above
    if (type === 'docker' && (pkg && dockerfile)) {
      const err = new Error(
        'Ambiguous deployment (`package.json` and `Dockerfile` found). ' +
          'Please supply `--npm` or `--docker` to disambiguate.'
      )

      err.userError = true
      err.code = 'MULTIPLE_MANIFESTS'

      throw err
    }
  }

  // Explicit CLI values win; otherwise fall back to the merged configuration
  if (!name && nowConfig) {
    name = nowConfig.name
  }

  if (!affinity && nowConfig) {
    affinity = nowConfig.sessionAffinity
  }

  if (type === 'npm') {
    if (pkg) {
      // Name preference inside package.json: `now.name` first, then `name`
      if (!name && pkg.now && pkg.now.name) {
        name = String(pkg.now.name)
      }

      if (!name && pkg.name) {
        name = String(pkg.name)
      }

      description = pkg.description
    }
  } else if (type === 'docker') {
    // NOTE(review): when `--docker` is forced but no `Dockerfile` exists,
    // `dockerfile` is undefined here and `.length` throws a TypeError —
    // confirm callers guarantee the file's presence for this type
    if (strict && dockerfile.length <= 0) {
      const err = new Error('No commands found in `Dockerfile`')
      err.userError = true

      throw err
    }

    // Collect LABEL entries (`name`, `description`, ...) from the Dockerfile
    const labels = {}

    dockerfile.filter(cmd => cmd.name === 'LABEL').forEach(({ args }) => {
      for (const key in args) {
        if (!{}.hasOwnProperty.call(args, key)) {
          continue
        }

        // Unescape and convert into string
        // NOTE(review): a plain property copy cannot throw, so this catch
        // looks dead — presumably a leftover from when values were parsed here
        try {
          labels[key] = args[key]
        } catch (err) {
          const e = new Error(
            `Error parsing value for LABEL ${key} in \`Dockerfile\``
          )

          e.userError = true
          throw e
        }
      }
    })

    if (!name) {
      name = labels.name
    }

    description = labels.description
  } else if (type === 'static') {
    // Do nothing
  } else {
    throw new TypeError(`Unsupported "deploymentType": ${type}`)
  }

  // No name in `package.json` / `now.json`, or "name" label in Dockerfile.
  // Default to the basename of the root dir
  if (!name) {
    name = basename(path)

    if (!quiet && type !== 'static') {
      if (type === 'docker') {
        console.log(
          `> No \`name\` LABEL in \`Dockerfile\`, using ${chalk.bold(name)}`
        )
      } else {
        console.log(
          `> No \`name\` in \`package.json\`, using ${chalk.bold(name)}`
        )
      }
    }
  }

  return {
    name,
    description,
    type,
    pkg,
    nowConfig,
    hasNowJson,

    // XXX: legacy
    deploymentType: type,
    sessionAffinity: affinity
  }
}
|
||||
|
|
||||
// Reads and parses the JSON file `name` inside `path`. Resolves with
// `undefined` when the file does not exist; any other failure (including
// malformed JSON) is flagged as a user error and rethrown.
async function readJSON(path, name) {
  const target = resolvePath(path, name)

  try {
    return JSON.parse(await readFile(target, 'utf8'))
  } catch (err) {
    // If the file doesn't exist then that's fine; any other error bubbles up
    if (err.code !== 'ENOENT') {
      err.userError = true
      throw err
    }
  }
}
|
||||
|
|
||||
// Reads and parses the Dockerfile `name` inside `path` (comments included).
// Resolves with `undefined` when the file does not exist; any other failure
// is flagged as a user error and rethrown.
async function readDockerfile(path, name = 'Dockerfile') {
  const target = resolvePath(path, name)

  try {
    const contents = await readFile(target, 'utf8')
    return parseDockerfile(contents, { includeComments: true })
  } catch (err) {
    // If the file doesn't exist then that's fine; any other error bubbles up
    if (err.code !== 'ENOENT') {
      err.userError = true
      throw err
    }
  }
}
|
@ -1,20 +0,0 @@ |
|||||
// Native
|
|
||||
const { parse } = require('url') |
|
||||
|
|
||||
/**
 * Converts a valid deployment lookup parameter to a hostname.
 * `http://google.com` => google.com
 * google.com => google.com
 */

function toHost(url) {
  const hasScheme = /^https?:\/\//.test(url)

  if (hasScheme) {
    return parse(url).host
  }

  // Remove any path if present
  // `a.b.c/` => `a.b.c`
  return url.replace(/(\/\/)?([^/]+)(.*)/, '$2')
}
|
||||
|
|
||||
// Single export: the hostname-normalizing helper above
module.exports = toHost
|
Loading…
Reference in new issue