From 19a2e55a9087f30c202e72925752a8c088f0990d Mon Sep 17 00:00:00 2001
From: Guillermo Rauch
Date: Thu, 12 May 2016 12:41:18 -0700
Subject: [PATCH] migrate to api.zeit.co and add warnings support

---
 bin/now-deploy   |  2 +-
 lib/get-files.js |  7 -------
 lib/index.js     | 41 +++++++++++++++++++++++++++++------------
 3 files changed, 30 insertions(+), 20 deletions(-)

diff --git a/bin/now-deploy b/bin/now-deploy
index 9a6d7aa..89c7487 100755
--- a/bin/now-deploy
+++ b/bin/now-deploy
@@ -51,7 +51,7 @@ const clipboard = !(argv.noClipboard || argv.C);
 const force = argv.f || argv.force;
 const forceSync = argv.F || argv.forceSync;
 const shouldLogin = argv.L || argv.login;
-const apiUrl = argv.url || 'https://api.now.sh';
+const apiUrl = argv.url || 'https://api.zeit.co';
 
 const config = cfg.read();
 
diff --git a/lib/get-files.js b/lib/get-files.js
index cf54b64..9408cc8 100644
--- a/lib/get-files.js
+++ b/lib/get-files.js
@@ -1,4 +1,3 @@
-import bytes from 'bytes';
 import flatten from 'arr-flatten';
 import unique from 'array-unique';
 import minimatch from 'minimatch';
@@ -143,12 +142,6 @@ const explode = async function (paths, ignored, { limit, debug }) {
       const all = await readdir(file);
       return many(all.map(subdir => asAbsolute(subdir, file)));
     } else {
-      if (null != limit && s.size > limit) {
-        console.error('> \u001b[31mWarning!\u001b[39m Skipping file ' +
-          `over ${bytes(limit)}: ${path}`);
-        return null;
-      }
-
       return path;
     }
   };
diff --git a/lib/index.js b/lib/index.js
index 75d0ac3..dc6a427 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -1,7 +1,8 @@
+import bytes from 'bytes';
+import chalk from 'chalk';
 import getFiles from './get-files';
 import hash from './hash';
 import retry from './retry';
-import bytes from 'bytes';
 import Agent from './agent';
 import EventEmitter from 'events';
 import { basename, resolve } from 'path';
@@ -9,9 +10,6 @@ import { stat, readFile } from 'fs-promise';
 import resumer from 'resumer';
 import splitArray from 'split-array';
 
-// limit of size of files to find
-const ONEMB = bytes('1mb');
-
 // how many concurrent HTTP/2 stream uploads
 const MAX_CONCURRENT = 10;
 
@@ -39,7 +37,6 @@ export default class Now extends EventEmitter {
       e.userError = true;
       throw e;
     }
-
     let pkg;
     try {
       pkg = await readFile(resolve(path, 'package.json'));
@@ -64,7 +61,7 @@
     }
 
     if (this._debug) console.time('> [debug] Getting files');
-    const files = await getFiles(path, pkg, { limit: ONEMB, debug: this._debug });
+    const files = await getFiles(path, pkg, { debug: this._debug });
     if (this._debug) console.timeEnd('> [debug] Getting files');
 
     if (this._debug) console.time('> [debug] Computing hashes');
@@ -74,8 +71,8 @@
     this._files = hashes;
 
     const deployment = await retry(async (bail) => {
-      if (this._debug) console.time('> [debug] /create');
-      const res = await this._fetch('/create', {
+      if (this._debug) console.time('> [debug] /now/create');
+      const res = await this._fetch('/now/create', {
         method: 'POST',
         body: {
           forceNew,
@@ -95,7 +92,7 @@
           }))
         }
       });
-      if (this._debug) console.timeEnd('> [debug] /create');
+      if (this._debug) console.timeEnd('> [debug] /now/create');
 
       // no retry on 4xx
       if (200 !== res.status && (400 <= res.status && 500 > res.status)) {
@@ -112,6 +109,26 @@
       return res.json();
     }, { retries: 3, minTimeout: 2500, onRetry: this._onRetry });
 
+    // we report about files whose sizes are too big
+    if (deployment.warnings) {
+      let sizeExceeded = 0;
+      deployment.warnings.forEach(({ reason, sha, limit }) => {
+        if ('size_exceeded' === reason) {
+          console.error('> \u001b[31mWarning!\u001b[39m Skipping file %s (size exceeded %s)',
+            hashes.get(sha),
+            bytes(limit)
+          );
+          sizeExceeded++;
+        }
+      });
+
+      if (sizeExceeded) {
+        console.log('> \u001b[31mWarning!\u001b[39m %d of the files ' +
+          'exceeded the limit for your plan.\n' +
+          `> See ${chalk.underline('https://zeit.co/account')} to upgrade.`);
+      }
+    }
+
     this._id = deployment.deploymentId;
     this._host = deployment.url;
     this._missing = deployment.missing || [];
@@ -136,7 +153,7 @@
       if (this._debug) console.time(`> [debug] /sync ${names.join(' ')}`);
 
       const stream = resumer().queue(data).end();
-      const res = await this._fetch('/sync', {
+      const res = await this._fetch('/now/sync', {
         method: 'POST',
         headers: {
           'Content-Type': 'application/octet-stream',
@@ -170,7 +187,7 @@
 
     const { deployments } = await retry(async (bail) => {
       if (this._debug) console.time('> [debug] /list');
-      const res = await this._fetch('/list' + query);
+      const res = await this._fetch('/now/list' + query);
       if (this._debug) console.timeEnd('> [debug] /list');
 
       // no retry on 4xx
@@ -196,7 +213,7 @@
 
     await retry(async (bail) => {
       if (this._debug) console.time('> [debug] /remove');
-      const res = await this._fetch('/remove', {
+      const res = await this._fetch('/now/remove', {
         method: 'DELETE',
         body: data
       });