
cap maximum uploads due to nginx 128 streams limit

master
Guillermo Rauch, 9 years ago
commit 9a8aa019c3

2 changed files:
  1. lib/index.js (20 changes)
  2. package.json (3 changes)
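Context for the change: nginx's HTTP/2 module limits the number of concurrent streams per client connection; the http2_max_concurrent_streams directive defaults to 128, which is presumably the limit the commit message refers to. Uploading every missing file at once could open more streams than that on a large deployment, so the diff below batches uploads into groups of 10 and runs the groups sequentially.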

lib/index.js

@@ -7,10 +7,14 @@ import EventEmitter from 'events';
 import { basename, resolve } from 'path';
 import { stat, readFile } from 'fs-promise';
 import resumer from 'resumer';
+import splitArray from 'split-array';

 // limit of size of files to find
 const ONEMB = bytes('1mb');

+// how many concurrent HTTP/2 stream uploads
+const MAX_CONCURRENT = 10;
+
 export default class Now extends EventEmitter {
   constructor (token, { forceNew = false, debug = false }) {
     super();
@@ -100,7 +104,16 @@ export default class Now extends EventEmitter {
   }

   upload () {
-    Promise.all(this._missing.map((sha) => retry(async (bail) => {
+    const parts = splitArray(this._missing, MAX_CONCURRENT);
+
+    if (this._debug) {
+      console.log('> [debug] Will upload ' +
+        `${this._missing.length} files in ${parts.length} ` +
+        `steps of ${MAX_CONCURRENT} uploads.`);
+    }
+
+    const uploadChunk = () => {
+      Promise.all(parts.shift().map((sha) => retry(async (bail) => {
       const file = this._files.get(sha);
       const { data, name } = file;
@@ -129,8 +142,11 @@
       this.emit('upload', file);
     }, { retries: 5, randomize: true, onRetry: this._onRetry })))
-    .then(() => this.emit('complete'))
+    .then(() => parts.length ? uploadChunk() : this.emit('complete'))
     .catch((err) => this.emit('error', err));
+    };
+
+    uploadChunk();
   }

   _onRetry (err) {
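Read as a whole, the change turns one big Promise.all over every missing file into a sequential chain of batches. A minimal standalone sketch of that pattern, assuming split-array chunks an array into fixed-size groups; uploadFile is a hypothetical stand-in for the real retry-wrapped HTTP/2 upload:

import splitArray from 'split-array';

const MAX_CONCURRENT = 10;

// Upload in batches of MAX_CONCURRENT: start the next batch only
// after every upload in the current batch has settled, so no more
// than MAX_CONCURRENT streams are ever open at once.
function uploadAll (shas, uploadFile) {
  const parts = splitArray(shas, MAX_CONCURRENT);

  const uploadChunk = () => {
    if (!parts.length) return Promise.resolve();
    return Promise.all(parts.shift().map(uploadFile))
      .then(() => uploadChunk());
  };

  return uploadChunk();
}

With batches of 10, at most 10 streams are open at a time, comfortably under nginx's 128-stream default; the trade-off is that one slow file in a batch delays the start of the next batch.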

package.json

@@ -25,7 +25,8 @@
     "progress": "1.1.8",
     "resumer": "0.0.0",
     "retry": "0.9.0",
-    "spdy": "3.2.3"
+    "spdy": "3.2.3",
+    "split-array": "1.0.1"
   },
   "devDependencies": {
     "alpha-sort": "1.0.2",