Browse Source

Removed useless legacy files

master
Leo Lamprecht 7 years ago
parent
commit
9b297cf76f
  1. 32
      src/providers/sh/commands/bin/deploy.js
  2. 34
      src/providers/sh/commands/bin/login.js
  3. 0
      src/providers/sh/commands/lib/check-path.js
  4. 5
      src/providers/sh/commands/lib/error.js
  5. 2
      src/providers/sh/commands/lib/index.js
  6. 141
      src/providers/sh/commands/lib/login.js
  7. 0
      src/providers/sh/commands/lib/prompt-options.js
  8. 1
      src/providers/sh/deploy.js
  9. 26
      src/providers/sh/index.js
  10. 83
      src/providers/sh/legacy/agent.js
  11. 138
      src/providers/sh/legacy/build-logger.js
  12. 91
      src/providers/sh/legacy/error.js
  13. 385
      src/providers/sh/legacy/get-files.js
  14. 221
      src/providers/sh/legacy/git.js
  15. 44
      src/providers/sh/legacy/hash.js
  16. 17
      src/providers/sh/legacy/ignored.js
  17. 14
      src/providers/sh/legacy/logs.js
  18. 1031
      src/providers/sh/legacy/now.js
  19. 57
      src/providers/sh/legacy/plans.js
  20. 195
      src/providers/sh/legacy/read-metadata.js
  21. 20
      src/providers/sh/legacy/to-host.js

32
src/providers/sh/legacy/deploy.js → src/providers/sh/commands/bin/deploy.js

@@ -16,21 +16,21 @@ const { write: copy } = require('clipboardy')
const inquirer = require('inquirer')
// Ours
const Logger = require('./build-logger')
const Now = require('./now.js')
const toHumanPath = require('../../../util/humanize-path')
const { handleError, error } = require('./error')
const { fromGit, isRepoPath, gitPathParts } = require('./git')
const readMetaData = require('./read-metadata')
const checkPath = require('./check-path')
const logo = require('../../../util/output/logo')
const cmd = require('../../../util/output/cmd')
const info = require('../../../util/output/info')
const wait = require('../../../util/output/wait')
const NowPlans = require('./plans')
const promptBool = require('../../../util/input/prompt-bool')
const promptOptions = require('./prompt-options')
const note = require('../../../util/output/note')
const Logger = require('../lib/build-logger')
const Now = require('../lib')
const toHumanPath = require('../../../../util/humanize-path')
const { handleError, error } = require('../lib/error')
const { fromGit, isRepoPath, gitPathParts } = require('../lib/git')
const readMetaData = require('../lib/read-metadata')
const checkPath = require('../lib/check-path')
const logo = require('../../../../util/output/logo')
const cmd = require('../../../../util/output/cmd')
const info = require('../../../../util/output/info')
const wait = require('../../../../util/output/wait')
const NowPlans = require('../lib/plans')
const promptBool = require('../../../../util/input/prompt-bool')
const promptOptions = require('../lib/prompt-options')
const note = require('../../../../util/output/note')
const mriOpts = {
string: ['config', 'token', 'name', 'alias', 'session-affinity'],
@@ -204,7 +204,7 @@ const envFields = async list => {
}
// eslint-disable-next-line import/no-unassigned-import
require('../../../util/input/patch-inquirer')
require('../../../../util/input/patch-inquirer')
console.log(
info('Please enter the values for the following environment variables:')

34
src/providers/sh/login.js → src/providers/sh/commands/bin/login.js

@@ -10,26 +10,26 @@ const ms = require('ms')
const { validate: validateEmail } = require('email-validator')
// ours
const { version } = require('./util/pkg')
const ua = require('./util/ua')
const error = require('../../util/output/error')
const aborted = require('../../util/output/aborted')
const wait = require('../../util/output/wait')
const highlight = require('../../util/output/highlight')
const info = require('../../util/output/info')
const ok = require('../../util/output/ok')
const cmd = require('../../util/output/cmd')
const ready = require('../../util/output/ready')
const param = require('../../util/output/param')
const eraseLines = require('../../util/output/erase-lines')
const sleep = require('../../util/sleep')
const getUser = require('./util/get-user')
const { version } = require('../../util/pkg')
const ua = require('../../util/ua')
const error = require('../../../../util/output/error')
const aborted = require('../../../../util/output/aborted')
const wait = require('../../../../util/output/wait')
const highlight = require('../../../../util/output/highlight')
const info = require('../../../../util/output/info')
const ok = require('../../../../util/output/ok')
const cmd = require('../../../../util/output/cmd')
const ready = require('../../../../util/output/ready')
const param = require('../../../../util/output/param')
const eraseLines = require('../../../../util/output/erase-lines')
const sleep = require('../../../../util/sleep')
const getUser = require('../../util/get-user')
const {
writeToAuthConfigFile,
writeToConfigFile
} = require('../../util/config-files')
const getNowDir = require('../../get-now-dir')
const hp = require('../../util/humanize-path')
} = require('../../../../util/config-files')
const getNowDir = require('../../../../get-now-dir')
const hp = require('../../../../util/humanize-path')
// POSTs to /now/registration – either creates an account or performs a login
// returns {token, securityCode}

0
src/providers/sh/legacy/check-path.js → src/providers/sh/commands/lib/check-path.js

5
src/providers/sh/commands/lib/error.js

@@ -2,8 +2,9 @@
const ms = require('ms')
const chalk = require('chalk')
const error = require('./utils/output/error')
const info = require('./utils/output/info')
// Utilities
const error = require('../../../../util/output/error')
const info = require('../../../../util/output/info')
function handleError(err, { debug = false } = {}) {
// Coerce Strings to Error instances

2
src/providers/sh/commands/lib/index.js

@@ -22,7 +22,7 @@ const {
npm: getNpmFiles,
docker: getDockerFiles
} = require('./get-files')
const ua = require('./ua')
const ua = require('../../util/ua')
const hash = require('./hash')
const Agent = require('./agent')
const toHost = require('./to-host')

141
src/providers/sh/commands/lib/login.js

@@ -1,141 +0,0 @@
// Native
const os = require('os')
// Packages
const { stringify: stringifyQuery } = require('querystring')
const chalk = require('chalk')
const fetch = require('node-fetch')
const { validate } = require('email-validator')
const readEmail = require('email-prompt')
const ora = require('ora')
// Ours
const pkg = require('../../../../util/pkg')
const ua = require('./ua')
const cfg = require('./cfg')
// POSTs to <url>/now/registration – either creates an account or performs a
// login – and returns the parsed response body: { token, securityCode }.
async function getVerificationData(url, email) {
  // Token name shown in the account's token list,
  // e.g. "Now CLI darwin-x64 8.0.0 (my-host)"
  const tokenName = `Now CLI ${os.platform()}-${os.arch()} ${pkg.version} (${os.hostname()})`

  const data = JSON.stringify({ email, tokenName })

  const res = await fetch(`${url}/now/registration`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(data),
      'User-Agent': ua
    },
    body: data
  })

  const body = await res.json()

  // Anything other than 200 means registration/login was rejected
  if (res.status !== 200) {
    throw new Error(
      `Verification error: ${res.status}${JSON.stringify(body)}`
    )
  }

  return body
}
// Polls the verification endpoint once. Resolves to the final auth token,
// or `undefined` while the emailed link has not been followed yet.
async function verify(url, email, verificationToken) {
  const query = {
    email,
    token: verificationToken
  }

  const res = await fetch(
    `${url}/now/registration/verify?${stringifyQuery(query)}`,
    {
      headers: { 'User-Agent': ua }
    }
  )
  const body = await res.json()
  return body.token
}
// Resolves after `ms` milliseconds.
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms))
}
// Interactive registration/login flow: prompts for an email address,
// requests a verification token, polls until the emailed link is clicked,
// then fetches the user profile.
// Returns { token, user: { uid, username, email }, lastUpdate }.
async function register(url, { retryEmail = false } = {}) {
  let email
  try {
    email = await readEmail({ invalid: retryEmail })
  } catch (err) {
    process.stdout.write('\n')
    throw err
  }

  process.stdout.write('\n')

  if (!validate(email)) {
    // Re-prompt with the "invalid" hint until a valid address is entered
    return register(url, { retryEmail: true })
  }

  const { token, securityCode } = await getVerificationData(url, email)
  console.log(
    `> Please follow the link sent to ${chalk.bold(email)} to log in.`
  )

  if (securityCode) {
    console.log(
      `> Verify that the provided security code in the email matches ${chalk.cyan(
        chalk.bold(securityCode)
      )}.`
    )
  }

  process.stdout.write('\n')

  const spinner = ora({
    text: 'Waiting for confirmation...'
  }).start()

  let final

  // Poll the verification endpoint every 2.5s until the login is confirmed
  /* eslint-disable no-await-in-loop */
  do {
    await sleep(2500)

    try {
      final = await verify(url, email, token)
    } catch (err) {}
  } while (!final)
  /* eslint-enable no-await-in-loop */

  let user
  try {
    user = (await (await fetch(`${url}/www/user`, {
      headers: {
        Authorization: `Bearer ${final}`
      }
    })).json()).user
  } catch (err) {
    spinner.stop()
    throw new Error(`Couldn't retrieve user details ${err.message}`)
  }

  spinner.text = 'Confirmed email address!'
  spinner.stopAndPersist('✔')

  process.stdout.write('\n')

  return {
    token: final,
    user: {
      uid: user.uid,
      username: user.username,
      email: user.email
    },
    lastUpdate: Date.now()
  }
}
// Runs the interactive login flow against `url`, persists the resulting
// credentials via `cfg`, and resolves to the new auth token.
module.exports = async function(url) {
  const loginData = await register(url)
  await cfg.merge(loginData)
  await cfg.remove('currentTeam') // Make sure to log the team out too
  await cfg.remove('email') // Remove old schema from previous versions
  return loginData.token
}

0
src/providers/sh/legacy/prompt-options.js → src/providers/sh/commands/lib/prompt-options.js

1
src/providers/sh/deploy.js

@@ -1 +0,0 @@
module.exports = require('./legacy/deploy')

26
src/providers/sh/index.js

@@ -1,4 +1,5 @@
const mainCommands = new Set([
'deploy',
'help',
'list',
'remove',
@@ -12,15 +13,11 @@ const mainCommands = new Set([
'teams',
'logs',
'scale',
'login',
'logout',
'whoami'
])
const specialCommands = new Set([
'deploy',
'login'
])
const aliases = {
list: ['ls'],
remove: ['rm'],
@@ -34,10 +31,7 @@ const aliases = {
logs: ['log']
}
const subcommands = new Set([
...Array.from(mainCommands),
...Array.from(specialCommands)
])
const subcommands = new Set(mainCommands)
// Add aliases to available sub commands
for (const alias in aliases) {
@@ -48,15 +42,9 @@ for (const alias in aliases) {
}
}
const list = {
const details = {
title: 'now.sh',
subcommands,
get deploy() {
return require('./deploy')
},
get login() {
return require('./login')
}
subcommands
}
for (const subcommand of mainCommands) {
@@ -67,7 +55,7 @@ for (const subcommand of mainCommands) {
}
for (const handler of handlers) {
Object.defineProperty(list, handler, {
Object.defineProperty(details, handler, {
get() {
return require(`./commands/bin/${subcommand}`)
}
@@ -75,4 +63,4 @@
}
}
module.exports = list
module.exports = details

83
src/providers/sh/legacy/agent.js

@@ -1,83 +0,0 @@
// Native
const { parse } = require('url')
const http = require('http')
const https = require('https')
// Packages
const fetch = require('node-fetch')
/**
* Returns a `fetch` version with a similar
* API to the browser's configured with a
* HTTP2 agent.
*
* It encodes `body` automatically as JSON.
*
* @param {String} host
* @return {Function} fetch
*/
module.exports = class Agent {
constructor(url, { tls = true, debug } = {}) {
this._url = url
const parsed = parse(url)
this._protocol = parsed.protocol
this._debug = debug
if (tls) {
this._initAgent()
}
}
_initAgent() {
const module = this._protocol === 'https:' ? https : http
this._agent = new module.Agent({
keepAlive: true,
keepAliveMsecs: 10000,
maxSockets: 8
}).on('error', err => this._onError(err, this._agent))
}
_onError(err, agent) {
if (this._debug) {
console.log(`> [debug] agent connection error ${err}\n${err.stack}`)
}
if (this._agent === agent) {
this._agent = null
}
}
fetch(path, opts = {}) {
if (!this._agent) {
if (this._debug) {
console.log('> [debug] re-initializing agent')
}
this._initAgent()
}
const { body } = opts
if (this._agent) {
opts.agent = this._agent
}
if (body && typeof body === 'object' && typeof body.pipe !== 'function') {
opts.headers['Content-Type'] = 'application/json'
opts.body = JSON.stringify(body)
}
if (opts.body && typeof body.pipe !== 'function') {
opts.headers['Content-Length'] = Buffer.byteLength(opts.body)
}
return fetch(this._url + path, opts)
}
close() {
if (this._debug) {
console.log('> [debug] closing agent')
}
if (this._agent) {
this._agent.destroy()
}
}
}

138
src/providers/sh/legacy/build-logger.js

@@ -1,138 +0,0 @@
// Native
const EventEmitter = require('events')
// Packages
const io = require('socket.io-client')
const chalk = require('chalk')
const { compare, deserialize } = require('./logs')
// Streams deployment state for `host` over a socket.io connection to
// io.now.sh, buffering and ordering build-log lines before printing them.
//
// Events emitted:
//  - 'error': deployment not found, or the state carries an error payload
//  - 'close': the deployment reached its backend (build finished)
module.exports = class Logger extends EventEmitter {
  constructor(host, token, { debug = false, quiet = false } = {}) {
    super()
    this.host = host
    this.token = token
    this.debug = debug
    this.quiet = quiet

    // ReadyState
    this.building = false

    this.socket = io(`https://io.now.sh/states?host=${host}&v=2`)
    this.socket.once('error', this.onSocketError.bind(this))
    this.socket.on('auth', this.onAuth.bind(this))
    this.socket.on('state', this.onState.bind(this))
    this.socket.on('logs', this.onLog.bind(this))
    this.socket.on('backend', this.onComplete.bind(this))

    // Log buffer
    this.buf = []
    this.printed = new Set()
  }

  // Server asks for credentials; reply with the deployment token
  onAuth(callback) {
    if (this.debug) {
      console.log('> [debug] authenticate')
    }
    callback(this.token)
  }

  // Handles a full state snapshot: may signal an error, completion,
  // or carry a batch of log entries to process
  onState(state) {
    // Console.log(state)
    if (!state.id) {
      console.error('> Deployment not found')
      this.emit('error')
      return
    }

    if (state.error) {
      this.emit('error', state)
      return
    }

    if (state.backend) {
      this.onComplete()
      return
    }

    if (state.logs) {
      state.logs.forEach(this.onLog, this)
    }
  }

  // Buffers a single log entry; entries are held for 500ms so they can
  // be sorted into serial order before printing
  onLog(log) {
    if (!this.building) {
      if (!this.quiet) {
        console.log('> Building')
      }
      this.building = true
    }

    if (this.quiet) {
      return
    }

    log = deserialize(log)

    const timer = setTimeout(() => {
      // Flush every buffered entry up to and including this one, in order
      this.buf.sort((a, b) => compare(a.log, b.log))
      const idx = this.buf.findIndex(b => b.log.id === log.id) + 1
      for (const b of this.buf.slice(0, idx)) {
        clearTimeout(b.timer)
        this.printLog(b.log)
      }
      this.buf = this.buf.slice(idx)
    }, 500)

    this.buf.push({ log, timer })
  }

  // Build finished: flush every remaining buffered log and close the socket
  onComplete() {
    this.socket.disconnect()

    if (this.building) {
      this.building = false
    }

    this.buf.sort((a, b) => compare(a.log, b.log))

    // Flush all buffer
    for (const b of this.buf) {
      clearTimeout(b.timer)
      this.printLog(b.log)
    }
    this.buf = []

    this.emit('close')
  }

  onSocketError(err) {
    if (this.debug) {
      console.log(`> [debug] Socket error ${err}\n${err.stack}`)
    }
  }

  // Writes one log entry to the console, deduplicating by id and
  // formatting according to the entry type
  printLog(log) {
    if (this.printed.has(log.id)) return
    this.printed.add(log.id)

    const data = log.object ? JSON.stringify(log.object) : log.text

    if (log.type === 'command') {
      console.log(`${chalk.gray('>')}${data}`)
    } else if (log.type === 'stderr') {
      data.split('\n').forEach(v => {
        if (v.length > 0) {
          console.error(chalk.gray(`> ${v}`))
        }
      })
    } else if (log.type === 'stdout') {
      data.split('\n').forEach(v => {
        if (v.length > 0) {
          console.log(`${chalk.gray('>')} ${v}`)
        }
      })
    }
  }
}

91
src/providers/sh/legacy/error.js

@@ -1,91 +0,0 @@
// Packages
const ms = require('ms')
const chalk = require('chalk')
const error = require('../../../util/output/error')
const info = require('../../../util/output/info')
// Prints a human-friendly message for `err`, dispatching on its HTTP
// status / flags. Known cases: 403 (auth), 429 (rate limit), user errors,
// 500, and user aborts; anything else gets a generic message.
function handleError(err, { debug = false } = {}) {
  // Coerce Strings to Error instances
  const failure = typeof err === 'string' ? new Error(err) : err

  if (debug) {
    console.log(`> [debug] handling error: ${failure.stack}`)
  }

  if (failure.status === 403) {
    console.log(
      error(
        'Authentication error. Run `now -L` or `now --login` to log-in again.'
      )
    )
    return
  }

  if (failure.status === 429) {
    if (failure.retryAfter === 'never') {
      console.log(error(failure.message))
    } else if (failure.retryAfter === null) {
      console.log(error('Rate limit exceeded error. Please try later.'))
    } else {
      console.log(
        error(
          'Rate limit exceeded error. Try again in ' +
            ms(failure.retryAfter * 1000, { long: true }) +
            ', or upgrade your account by running ' +
            `${chalk.gray('`')}${chalk.cyan('now upgrade')}${chalk.gray('`')}`
        )
      )
    }
    return
  }

  if (failure.userError) {
    console.log(error(failure.message))
  } else if (failure.status === 500) {
    console.log(error('Unexpected server error. Please retry.'))
  } else if (failure.code === 'USER_ABORT') {
    console.log(info('Aborted'))
  } else {
    console.log(
      error(`Unexpected error. Please try again later. (${failure.message})`)
    )
  }
}
// Builds an Error from a failed HTTP response. 4xx responses are marked
// as user errors and their JSON body is mined for a message; 429 responses
// additionally carry a numeric `retryAfter` (seconds) when the header is set.
async function responseError(res) {
  const isClientError = res.status >= 400 && res.status < 500

  let message
  if (isClientError) {
    let body = {}
    try {
      body = await res.json()
    } catch (err) {
      // Non-JSON body — fall through to the generic message
    }
    // Some APIs wrongly return `err` instead of `error`
    message = (body.error || body.err || {}).message
  }

  const err = new Error(message || 'Response error')
  err.status = res.status
  err.userError = isClientError

  if (res.status === 429) {
    const retryAfter = res.headers.get('Retry-After')
    if (retryAfter) {
      err.retryAfter = parseInt(retryAfter, 10)
    }
  }

  return err
}
module.exports = {
handleError,
responseError,
error
}

385
src/providers/sh/legacy/get-files.js

@@ -1,385 +0,0 @@
// Native
const { resolve } = require('path')
// Packages
const flatten = require('arr-flatten')
const unique = require('array-unique')
const ignore = require('ignore')
const _glob = require('glob')
const { stat, readdir, readFile } = require('fs-extra')
// Ours
const IGNORED = require('./ignored')
// Promise wrapper around the callback-style `glob` package.
const glob = async function(pattern, options) {
  return new Promise((resolve, reject) => {
    _glob(pattern, options, (error, files) => {
      if (error) {
        reject(error)
        return
      }
      resolve(files)
    })
  })
}
/**
 * Strips a leading `./` from every ignore entry (both at the start of the
 * string and after each newline) because the ignore parser rejects them.
 */
const clearRelative = str => str.replace(/(\n|^)\.\//g, '$1')
/**
 * Reads a file as UTF-8, returning `default_` when it cannot be read.
 *
 * @param {String} path file to read
 * @param {*} default_ value returned on any read error (defaults to `''`)
 * @return {String} file contents or the fallback
 */
const maybeRead = async function(path, default_ = '') {
  return readFile(path, 'utf8').catch(() => default_)
}
/**
 * Expands `path` relative to `parent`; absolute paths pass through
 * untouched.
 *
 * @param {String} path possibly relative path
 * @param {String} parent full path used as the base directory
 * @return {String} absolute path
 */
const asAbsolute = function(path, parent) {
  return path.startsWith('/') ? path : resolve(parent, path)
}
/**
 * Returns a list of files in the given
 * directory that are subject to be
 * synchronized for static deployments.
 *
 * @param {String} path full path to directory
 * @param {Object} nowConfig parsed `now.json` (its `files` acts as a whitelist)
 * @param {Object} options:
 *  - `limit` {Number|null} byte limit
 *  - `hasNowJson` {Boolean} force-include `now.json` in the result
 *  - `debug` {Boolean} warn upon ignore
 * @return {Array} comprehensive list of paths to sync
 */
async function staticFiles(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files
  const gitIgnore = await maybeRead(resolve(path, '.gitignore'))

  const filter = ignore()
    .add(IGNORED + '\n' + clearRelative(gitIgnore))
    .createFilter()

  const prefixLength = path.length + 1

  // The package.json `files` whitelist still
  // honors npmignores: https://docs.npmjs.com/files/package.json#files
  // but we don't ignore if the user is explicitly listing files
  // under the now namespace, or using files in combination with gitignore
  const accepts = file => {
    // Ignore patterns are matched against project-root-relative paths
    const relativePath = file.substr(prefixLength)

    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  if (hasNowJson) {
    // `now.json` must always be uploaded so the server sees the config
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
/**
 * Returns a list of files in the given
 * directory that are subject to be
 * synchronized for npm.
 *
 * @param {String} path full path to directory
 * @param {Object} pkg contents of `package.json` to avoid lookup
 * @param {Object} nowConfig parsed `now.json` (its `files` acts as a whitelist)
 * @param {Object} options:
 *  - `limit` {Number|null} byte limit
 *  - `hasNowJson` {Boolean} force-include `now.json` in the result
 *  - `debug` {Boolean} warn upon ignore
 * @return {Array} comprehensive list of paths to sync
 */
async function npm(
  path,
  pkg = {},
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  // Whitelist precedence: now.json files > package.json files > package.json now.files
  const whitelist = nowConfig.files || pkg.files || (pkg.now && pkg.now.files)

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files;
  // `.npmignore` takes precedence over `.gitignore` (npm semantics)
  const npmIgnore = await maybeRead(resolve(path, '.npmignore'), null)
  const gitIgnore =
    npmIgnore === null ? await maybeRead(resolve(path, '.gitignore')) : null

  const filter = ignore()
    .add(
      IGNORED + '\n' + clearRelative(npmIgnore === null ? gitIgnore : npmIgnore)
    )
    .createFilter()

  const prefixLength = path.length + 1

  // The package.json `files` whitelist still
  // honors npmignores: https://docs.npmjs.com/files/package.json#files
  // but we don't ignore if the user is explicitly listing files
  // under the now namespace, or using files in combination with gitignore
  const overrideIgnores =
    (pkg.now && pkg.now.files) ||
    nowConfig.files ||
    (gitIgnore !== null && pkg.files)

  const accepts = overrideIgnores
    ? () => true
    : file => {
        const relativePath = file.substr(prefixLength)

        if (relativePath === '') {
          return true
        }

        const accepted = filter(relativePath)
        if (!accepted && debug) {
          console.log('> [debug] ignoring "%s"', file)
        }
        return accepted
      }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('package.json', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
/**
 * Returns a list of files in the given
 * directory that are subject to be
 * sent to docker as build context.
 *
 * @param {String} path full path to directory
 * @param {Object} nowConfig parsed `now.json` (its `files` acts as a whitelist)
 * @param {Object} options:
 *  - `limit` {Number|null} byte limit
 *  - `hasNowJson` {Boolean} force-include `now.json` in the result
 *  - `debug` {Boolean} warn upon ignore
 * @return {Array} comprehensive list of paths to sync
 */
async function docker(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // Base search path
  // the now.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = search_.map(file => asAbsolute(file, path))

  // Compile list of ignored patterns and files;
  // `.dockerignore` takes precedence over `.gitignore` when present
  const dockerIgnore = await maybeRead(resolve(path, '.dockerignore'), null)

  const filter = ignore()
    .add(
      IGNORED +
        '\n' +
        clearRelative(
          dockerIgnore === null
            ? await maybeRead(resolve(path, '.gitignore'))
            : dockerIgnore
        )
    )
    .createFilter()

  const prefixLength = path.length + 1
  const accepts = function(file) {
    const relativePath = file.substr(prefixLength)

    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, { accepts, limit, debug })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('Dockerfile', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
/**
 * Explodes directories into a full list of files.
 * Eg:
 *   in:  ['/a.js', '/b']
 *   out: ['/a.js', '/b/c.js', '/b/d.js']
 *
 * @param {Array} paths of {String}s representing paths
 * @param {Object} options:
 *  - `accepts` {Function} predicate deciding whether a path is kept
 *  - `debug` {Boolean} warn upon ignore
 * @return {Array} of {String}s of full paths
 */
async function explode(paths, { accepts, debug }) {
  // Resolves one entry to a file path, a nested array (the contents of a
  // directory), or null when the entry should be dropped
  const list = async file => {
    let path = file
    let s

    if (!accepts(file)) {
      return null
    }

    try {
      s = await stat(path)
    } catch (e) {
      // In case the file comes from `files`
      // and it wasn't specified with `.js` by the user
      path = file + '.js'

      try {
        s = await stat(path)
      } catch (e2) {
        if (debug) {
          console.log('> [debug] ignoring invalid file "%s"', file)
        }
        return null
      }
    }

    if (s.isDirectory()) {
      // Recurse into the directory's entries
      const all = await readdir(file)
      /* eslint-disable no-use-before-define */
      return many(all.map(subdir => asAbsolute(subdir, file)))
      /* eslint-enable no-use-before-define */
    } else if (!s.isFile()) {
      if (debug) {
        console.log('> [debug] ignoring special file "%s"', file)
      }
      return null
    }

    return path
  }

  const many = all => Promise.all(all.map(file => list(file)))

  // Flatten the recursive structure and drop rejected entries
  return flatten(await many(paths)).filter(v => v !== null)
}
module.exports = {
npm,
docker,
staticFiles
}

221
src/providers/sh/legacy/git.js

@@ -1,221 +0,0 @@
// Native
const path = require('path')
const url = require('url')
const childProcess = require('child_process')
// Packages
const fs = require('fs-extra')
const download = require('download')
const tmp = require('tmp-promise')
const isURL = require('is-url')
// Clones `parts.main` from the matching Git host into `tmpDir.path`,
// over https by default or ssh when `{ ssh: true }` is passed.
// Fix: the options argument now defaults to `{}` — previously calling
// `cloneRepo(parts, tmpDir)` without options threw a TypeError while
// destructuring, so the plain-https attempt could never succeed.
const cloneRepo = (parts, tmpDir, { ssh } = {}) =>
  new Promise((resolve, reject) => {
    let host

    switch (parts.type) {
      case 'GitLab':
        host = `gitlab.com`
        break
      case 'Bitbucket':
        host = `bitbucket.org`
        break
      default:
        host = `github.com`
    }

    const url = ssh
      ? `git@${host}:${parts.main}`
      : `https://${host}/${parts.main}`

    const ref = parts.ref || (parts.type === 'Bitbucket' ? 'default' : 'master')
    // NOTE(review): `ref` sits where git expects the target-directory
    // argument, not `--branch` — the clone result is renamed afterwards by
    // `renameRepoDir`, so this works, but confirm the intended branch is
    // actually checked out before relying on it.
    const cmd = `git clone ${url} --single-branch ${ref}`

    childProcess.exec(cmd, { cwd: tmpDir.path }, (err, stdout) => {
      if (err) {
        reject(err)
        return
      }

      resolve(stdout)
    })
  })
// Renames the single cloned/extracted directory inside `tmpDir` to
// "<owner>-<repo>" and points `tmpDir.path` at the renamed directory.
const renameRepoDir = async (pathParts, tmpDir) => {
  const [firstEntry] = await fs.readdir(tmpDir.path)
  const oldTemp = path.join(tmpDir.path, firstEntry)
  const newTemp = path.join(tmpDir.path, pathParts.main.replace('/', '-'))

  await fs.rename(oldTemp, newTemp)
  tmpDir.path = newTemp

  return tmpDir
}
// Maps a lowercase host prefix ("github") to its display name ("GitHub").
const capitalizePlatform = name =>
  ({
    github: 'GitHub',
    gitlab: 'GitLab',
    bitbucket: 'Bitbucket'
  }[name])
// Breaks a full Git-host URL down into { main: "owner/repo", ref, type }.
const splittedURL = fullURL => {
  const parsedURL = url.parse(fullURL)
  const pathParts = parsedURL.path.split('/')

  // Drop the empty segment before the leading slash
  pathParts.shift()

  // Set path to repo...
  const main = pathParts[0] + '/' + pathParts[1]

  // ...and then remove it from the parts
  pathParts.splice(0, 2)

  // Assign Git reference (e.g. /tree/<ref> or /commit/<sha>)
  let ref = pathParts.length >= 2 ? pathParts[1] : ''

  // First make sure we know the ref type
  if (pathParts[0]) {
    // Then shorten the SHA of the commit
    if (pathParts[0] === 'commit' || pathParts[0] === 'commits') {
      ref = ref.substring(0, 7)
    }
  }

  // We're deploying master by default,
  // so there's no need to indicate it explicitly
  if (ref === 'master') {
    ref = ''
  }

  return {
    main,
    ref,
    // Derive the platform from the hostname, e.g. "github.com" -> "GitHub"
    type: capitalizePlatform(parsedURL.host.split('.')[0])
  }
}
// Parses a repo reference — either a full URL or a "user/repo#ref"
// shorthand — into { main, ref, type }.
const gitPathParts = main => {
  if (isURL(main)) {
    return splittedURL(main)
  }

  let ref = ''
  if (main.split('/')[1].includes('#')) {
    const [repo, hash] = main.split('#')
    main = repo
    ref = hash
  }

  return {
    main,
    ref,
    type: capitalizePlatform('github')
  }
}
// Fetches the repository referenced by `repoPath` into a temp directory.
// Tries `git clone` first (https, then ssh); when git is unavailable it
// falls back to downloading the host's archive of the repo.
// Resolves to the tmp-dir descriptor, or `false` when the fallback fails.
const downloadRepo = async repoPath => {
  const pathParts = gitPathParts(repoPath)

  const tmpDir = await tmp.dir({
    // We'll remove it manually once deployment is done
    keep: true,
    // Recursively remove directory when calling respective method
    unsafeCleanup: true
  })

  let gitInstalled = true

  try {
    await cloneRepo(pathParts, tmpDir)
  } catch (err) {
    try {
      await cloneRepo(pathParts, tmpDir, { ssh: true })
    } catch (err) {
      gitInstalled = false
    }
  }

  if (gitInstalled) {
    const renaming = await renameRepoDir(pathParts, tmpDir)
    return renaming
  }

  // Git is not installed — download an archive from the host instead
  let url

  switch (pathParts.type) {
    case 'GitLab': {
      const ref = pathParts.ref ? `?ref=${pathParts.ref}` : ''
      url = `https://gitlab.com/${pathParts.main}/repository/archive.tar` + ref
      break
    }
    case 'Bitbucket':
      url = `https://bitbucket.org/${pathParts.main}/get/${pathParts.ref ||
        'default'}.zip`
      break
    default:
      url = `https://api.github.com/repos/${pathParts.main}/tarball/${pathParts.ref}`
  }

  try {
    await download(url, tmpDir.path, {
      extract: true
    })
  } catch (err) {
    // Archive download failed — remove the temp directory and signal failure
    tmpDir.cleanup()
    return false
  }

  const renaming = await renameRepoDir(pathParts, tmpDir)
  return renaming
}
// Returns whether `path` looks like a deployable repository reference —
// either a URL on a supported Git host or a bare "user/repo" string.
// Throws a user error for URLs on unsupported hosts (or bare host URLs).
const isRepoPath = path => {
  if (!path) {
    return false
  }

  const allowedHosts = ['github.com', 'gitlab.com', 'bitbucket.org']

  if (!isURL(path)) {
    // Bare "user/repo" shorthand
    return /[^\s\\]\/[^\s\\]/g.test(path)
  }

  const urlParts = url.parse(path)
  const slashSplitted = urlParts.path.split('/').filter(n => n)
  const notBare = slashSplitted.length >= 2

  if (allowedHosts.includes(urlParts.host) && notBare) {
    return true
  }

  const err = new Error(`Host "${urlParts.host}" is unsupported.`)
  err.code = 'INVALID_URL'
  err.userError = true
  throw err
}
// Attempts to download the given repo reference; resolves to the tmp
// directory descriptor, or `false` when the download failed.
const fromGit = async (path, debug) => {
  try {
    return await downloadRepo(path)
  } catch (err) {
    if (debug) {
      console.log(`Could not download "${path}" repo from GitHub`)
    }
    return false
  }
}
module.exports = {
gitPathParts,
isRepoPath,
fromGit
}

44
src/providers/sh/legacy/hash.js

@@ -1,44 +0,0 @@
// Native
const { createHash } = require('crypto')
// Packages
const { readFile } = require('fs-extra')
/**
 * Computes hashes for the contents of each file given.
 *
 * Files with identical contents are grouped under one map entry whose
 * `names` array lists every path sharing that digest.
 *
 * @param {Array} files of {String} full paths
 * @return {Map} hex digest -> { names: [String], data: Buffer }
 */
async function hashes(files) {
  const map = new Map()

  await Promise.all(
    files.map(async name => {
      const data = await readFile(name)

      const h = hash(data)
      const entry = map.get(h)
      if (entry) {
        entry.names.push(name)
      } else {
        // Reuse the digest computed above instead of hashing `data` twice
        map.set(h, { names: [name], data })
      }
    })
  )
  return map
}
/**
 * Computes a SHA-1 hash for the given buffer.
 *
 * @param {Buffer} buf file data
 * @return {String} hex digest
 */
function hash(buf) {
  const sha1 = createHash('sha1')
  sha1.update(buf)
  return sha1.digest('hex')
}
module.exports = hashes

17
src/providers/sh/legacy/ignored.js

@@ -1,17 +0,0 @@
// Base `.gitignore` to which we add entries
// supplied by the user
// NOTE(review): `.wafpicke-*` looks like a typo for npm's default
// `.wafpickle-*` ignore pattern — confirm before changing, since the
// string below is matched literally against file names at runtime.
module.exports = `.hg
.git
.gitmodules
.svn
.npmignore
.dockerignore
.gitignore
.*.swp
.DS_Store
.wafpicke-*
.lock-wscript
npm-debug.log
config.gypi
node_modules
CVS`

14
src/providers/sh/legacy/logs.js

@@ -1,14 +0,0 @@
exports.compare = function(a, b) {
return (
a.serial.localeCompare(b.serial) ||
// For the case serials are a same value on old logs
a.created.getTime() - b.created.getTime()
)
}
exports.deserialize = function(log) {
return Object.assign({}, log, {
date: new Date(log.date),
created: new Date(log.created)
})
}

1031
src/providers/sh/legacy/now.js

File diff suppressed because it is too large

57
src/providers/sh/legacy/plans.js

@@ -1,57 +0,0 @@
const ms = require('ms')
const Now = require('./now')
// Normalizes the /plan API response into { id, name, until }.
// Accounts without a paid main plan map to the 'oss' plan; `until` is
// only set when a paid subscription is scheduled to end.
async function parsePlan(json) {
  const { subscription } = json

  let id
  let name
  let until

  if (!subscription) {
    return { id: 'oss', name, until }
  }

  const planItems = subscription.items.data
  const mainPlan = planItems.find(d => d.plan.metadata.is_main_plan === '1')

  if (!mainPlan) {
    return { id: 'oss', name, until }
  }

  id = mainPlan.plan.id
  name = mainPlan.plan.name

  if (subscription.cancel_at_period_end) {
    // Human-readable time remaining until the canceled subscription ends
    until = ms(
      new Date(subscription.current_period_end * 1000) - new Date(),
      { long: true }
    )
  }

  return { id, name, until }
}
// API client for the account's billing plan, layered on the `Now` client.
module.exports = class Plans extends Now {
  // Fetches the current plan and normalizes it via `parsePlan`
  async getCurrent() {
    const res = await this._fetch('/plan')
    const json = await res.json()
    return parsePlan(json)
  }

  // Switches the account to `plan`; throws a coded Error when the API
  // rejects the change
  async set(plan) {
    const res = await this._fetch('/plan', {
      method: 'PUT',
      body: { plan }
    })

    const json = await res.json()

    if (res.ok) {
      return parsePlan(json)
    }

    const err = new Error(json.error.message)
    err.code = json.error.code
    throw err
  }
}

195
src/providers/sh/legacy/read-metadata.js

@@ -1,195 +0,0 @@
// Native
const { basename, resolve: resolvePath } = require('path')
// Packages
const chalk = require('chalk')
const { readFile } = require('fs-extra')
const { parse: parseDockerfile } = require('docker-file-parser')
const determineType = require('deployment-type')
module.exports = readMetaData

// Reads deployment metadata for the project at `path`, merging explicit CLI
// options with `package.json`, `now.json` and `Dockerfile` sources (in that
// precedence order for most fields).
//
// @param {String} path - project root directory
// @param {Object} options
//        deploymentType  - forced type ('npm' | 'docker' | 'static'), optional
//        deploymentName  - forced deployment name, optional
//        sessionAffinity - forced session affinity, optional
//        quiet           - suppress informational console output
//        strict          - for docker deployments, require a non-empty Dockerfile
// @returns {Object} { name, description, type, pkg, nowConfig, hasNowJson,
//                     deploymentType, sessionAffinity } — the last two are
//                     legacy aliases of `type` / `affinity`
// @throws Error with `userError = true` on conflicting or ambiguous config
async function readMetaData(
  path,
  {
    deploymentType,
    deploymentName,
    sessionAffinity,
    quiet = false,
    strict = true
  }
) {
  let description
  let type = deploymentType
  let name = deploymentName
  let affinity = sessionAffinity

  // Each reader resolves to `undefined` when its file is absent
  const pkg = await readJSON(path, 'package.json')
  let nowConfig = await readJSON(path, 'now.json')
  const dockerfile = await readDockerfile(path)

  const hasNowJson = Boolean(nowConfig)

  if (pkg && pkg.now) {
    // If the project has both a `now.json` and `now` Object in the `package.json`
    // file, then fail hard and let the user know that they need to pick one or the
    // other
    if (nowConfig) {
      const err = new Error(
        'You have a `now` configuration field inside `package.json` ' +
          'but configuration is also present in `now.json`! ' +
          "Please ensure there's a single source of configuration by removing one."
      )
      err.userError = true
      throw err
    } else {
      // Treat `package.json`'s `now` field as the config source
      nowConfig = pkg.now
    }
  }

  // We can remove this once the prompt for choosing `--npm` or `--docker` is gone
  // (at this point `nowConfig === pkg.now`, so this reads the same value)
  if (pkg && pkg.now && pkg.now.type) {
    type = nowConfig.type
  }

  // The same goes for this
  if (nowConfig && nowConfig.type) {
    type = nowConfig.type
  }

  if (!type) {
    // Infer the type from the project's files
    type = await determineType(path)

    // Both `package.json` and `Dockerfile` exist! Prompt the user to pick one.
    // We can remove this soon (details are internal) - also read the comment paragraph above
    if (type === 'docker' && (pkg && dockerfile)) {
      const err = new Error(
        'Ambiguous deployment (`package.json` and `Dockerfile` found). ' +
          'Please supply `--npm` or `--docker` to disambiguate.'
      )
      err.userError = true
      err.code = 'MULTIPLE_MANIFESTS'
      throw err
    }
  }

  // CLI options win; fall back to `now.json` values
  if (!name && nowConfig) {
    name = nowConfig.name
  }

  if (!affinity && nowConfig) {
    affinity = nowConfig.sessionAffinity
  }

  if (type === 'npm') {
    if (pkg) {
      if (!name && pkg.now && pkg.now.name) {
        name = String(pkg.now.name)
      }

      if (!name && pkg.name) {
        name = String(pkg.name)
      }

      description = pkg.description
    }
  } else if (type === 'docker') {
    // NOTE(review): if `--docker` was forced and no Dockerfile exists,
    // `dockerfile` is undefined and this access throws — confirm callers
    // guard against that before relying on this path
    if (strict && dockerfile.length <= 0) {
      const err = new Error('No commands found in `Dockerfile`')
      err.userError = true
      throw err
    }

    // Collect LABEL instructions; `name` and `description` labels feed the
    // deployment metadata
    const labels = {}

    dockerfile.filter(cmd => cmd.name === 'LABEL').forEach(({ args }) => {
      for (const key in args) {
        if (!{}.hasOwnProperty.call(args, key)) {
          continue
        }

        // Unescape and convert into string
        try {
          labels[key] = args[key]
        } catch (err) {
          const e = new Error(
            `Error parsing value for LABEL ${key} in \`Dockerfile\``
          )

          e.userError = true
          throw e
        }
      }
    })

    if (!name) {
      name = labels.name
    }

    description = labels.description
  } else if (type === 'static') {
    // Do nothing
  } else {
    throw new TypeError(`Unsupported "deploymentType": ${type}`)
  }

  // No name in `package.json` / `now.json`, or "name" label in Dockerfile.
  // Default to the basename of the root dir
  if (!name) {
    name = basename(path)

    if (!quiet && type !== 'static') {
      if (type === 'docker') {
        console.log(
          `> No \`name\` LABEL in \`Dockerfile\`, using ${chalk.bold(name)}`
        )
      } else {
        console.log(
          `> No \`name\` in \`package.json\`, using ${chalk.bold(name)}`
        )
      }
    }
  }

  return {
    name,
    description,
    type,
    pkg,
    nowConfig,
    hasNowJson,
    // XXX: legacy
    deploymentType: type,
    sessionAffinity: affinity
  }
}
// Reads and parses the JSON file `name` inside `path`. Resolves to
// `undefined` when the file does not exist; any other failure (including a
// parse error) is flagged as a user error and rethrown.
async function readJSON(path, name) {
  const file = resolvePath(path, name)

  try {
    const contents = await readFile(file, 'utf8')
    return JSON.parse(contents)
  } catch (err) {
    if (err.code === 'ENOENT') {
      // Missing file is fine — caller treats `undefined` as "no config"
      return undefined
    }

    err.userError = true
    throw err
  }
}
// Reads `name` (default `Dockerfile`) inside `path` and parses it into its
// instruction list (comments included). Resolves to `undefined` when the
// file is absent; other failures are flagged as user errors and rethrown.
async function readDockerfile(path, name = 'Dockerfile') {
  const file = resolvePath(path, name)

  try {
    const contents = await readFile(file, 'utf8')
    return parseDockerfile(contents, { includeComments: true })
  } catch (err) {
    if (err.code === 'ENOENT') {
      // Missing Dockerfile is fine — caller handles `undefined`
      return undefined
    }

    err.userError = true
    throw err
  }
}

20
src/providers/sh/legacy/to-host.js

@@ -1,20 +0,0 @@
// Native
const { parse } = require('url')
/**
* Converts a valid deployment lookup parameter to a hostname.
* `http://google.com` => google.com
* google.com => google.com
*/
function toHost(url) {
  const hasScheme = /^https?:\/\//.test(url)

  // Full URLs are delegated to the `url` module's parser
  if (hasScheme) {
    return parse(url).host
  }

  // Otherwise strip a protocol-relative `//` prefix and anything after the
  // host segment: `a.b.c/` => `a.b.c`
  return url.replace(/(\/\/)?([^/]+)(.*)/, '$2')
}
module.exports = toHost
Loading…
Cancel
Save