
show top author dependencies by download count

master
Feross Aboukhadijeh 7 years ago
commit 4773b38dfe
  1. cmd.js (156 changed lines)
  2. package.json (1 changed line)
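
In short: instead of just counting packages per maintainer, the command now fetches last-month download counts from the npm downloads API and sorts each author's package list by those counts, so the "N packages including x, y" column leads with the author's most-downloaded packages. A minimal sketch of that sort step (the package names and counts below are made up for illustration; the real data comes from the new bulkFetchDownloads() and the sort happens inside computeAuthorInfos()):

// Hypothetical inputs, for illustration only
const downloadCounts = { minimist: 30000000, pify: 12000000, 'pkg-dir': 9000000 }
const authorPkgs = ['pkg-dir', 'pify', 'minimist']

// Same comparator as computeAuthorInfos(): highest download count first
authorPkgs.sort((pkg1, pkg2) => downloadCounts[pkg2] - downloadCounts[pkg1])

console.log(authorPkgs)
// => [ 'minimist', 'pify', 'pkg-dir' ]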

cmd.js (156 changed lines)

@@ -1,53 +1,55 @@
 #!/usr/bin/env node
 const chalk = require('chalk')
+const got = require('got') // TODO: use simple-peer when it supports promises
 const minimist = require('minimist')
 const pify = require('pify')
 const pkgDir = require('pkg-dir')
 const readPackageTree = require('read-package-tree')
-const RegistryClient = require('npm-registry-client') // TODO: use npm-registry-fetch
+const RegistryClient = require('npm-registry-client') // TODO: use npm-registry-fetch when done
 const registryUrl = require('registry-url')
 const stripAnsi = require('strip-ansi')
 const textTable = require('text-table')
-const donees = require('./')
+const thanks = require('./')
+
+const DOWNLOADS_URL = 'https://api.npmjs.org/downloads/point/last-month/'
+const DOWNLOADS_URL_LIMIT = 128
 
 const readPackageTreeAsync = pify(readPackageTree)
 
 init().catch(handleError)
 
 async function init () {
-  const argv = minimist(process.argv.slice(2))
+  const client = createRegistryClient()
+
+  const argv = minimist(process.argv.slice(2))
   const cwd = argv._[0] || process.cwd()
-  const authors = {}
-  const client = createRegistryClient()
 
   // Get all packages in the nearest `node_modules` folder
   const rootPath = await pkgDir(cwd)
   const packageTree = await readPackageTreeAsync(rootPath)
-  const pkgNames = packageTree.children.map(node => node.package.name)
 
   // Get latest registry data on each local package, since the local data does
   // not include the list of maintainers
-  const pkgs = await Promise.all(pkgNames.map(fetchPkg))
-
-  pkgs.forEach(pkg => {
-    pkg.maintainers
-      .map(maintainer => maintainer.name)
-      .forEach(author => addPackageAuthor(pkg.name, author))
-  })
-
-  const rows = Object.keys(authors)
-    .filter(author => donees.authors[author] != null)
-    .sort((author1, author2) => authors[author2].length - authors[author1].length)
+  const pkgNames = packageTree.children.map(node => node.package.name)
+  const allPkgs = await Promise.all(pkgNames.map(fetchPkg))
+
+  // Fetch download counts for each package
+  const downloadCounts = await bulkFetchDownloads(pkgNames)
+
+  const authorInfos = computeAuthorInfos(allPkgs, downloadCounts)
+
+  const rows = Object.keys(authorInfos)
+    .filter(author => thanks.authors[author] != null)
+    .sort((author1, author2) => authorInfos[author2].length - authorInfos[author1].length)
     .map(author => {
-      const deps = authors[author]
+      const authorPkgs = authorInfos[author]
+      const donateLink = thanks.authors[author]
       return [
         chalk.green(author),
-        donees.authors[author],
-        `${deps.length} packages including ${deps.slice(0, 3).join(', ')}`
+        donateLink,
+        `${authorPkgs.length} packages including ${authorPkgs.slice(0, 2).join(', ')}`
       ]
     })
@@ -58,33 +60,15 @@ async function init () {
   ])
 
   const tableOpts = {
-    // align: ['l', 'l', 'l'],
     stringLength: str => stripAnsi(str).length
   }
   const table = textTable(rows, tableOpts)
   console.log(table)
 
-  function createRegistryClient () {
-    const opts = {
-      log: {
-        error () {},
-        http () {},
-        info () {},
-        silly () {},
-        verbose () {},
-        warn () {}
-      }
-    }
-    const client = new RegistryClient(opts)
-    client.getAsync = pify(client.get.bind(client))
-    return client
-  }
-
   async function fetchPkg (pkgName) {
     // The registry does not support fetching versions for scoped packages
-    const isScopedPackage = pkgName.includes('/')
-    const url = isScopedPackage
+    const url = isScopedPkg(pkgName)
       ? `${registryUrl()}${pkgName.replace('/', '%2F')}`
      : `${registryUrl()}${pkgName}/latest`
@@ -94,46 +78,78 @@ async function init () {
     }
     return client.getAsync(url, opts)
   }
+}
 
-  function addPackageAuthor (pkgName, author) {
-    if (authors[author] == null) authors[author] = []
-    authors[author].push(pkgName)
-  }
-
-  // const rootPkg = await fetchLocalPkg()
-
-  // const rootDeps = [].concat(
-  //   findDeps(rootPkg, 'dependencies'),
-  //   findDeps(rootPkg, 'devDependencies'),
-  //   findDeps(rootPkg, 'optionalDependencies')
-  // )
-
-  // const queue = [].push(...rootDeps)
-
-  // while (queue.length > 0) {
-  //   const pkgs = await Promise.all(queue.slice(0, CONCURRENCY).map(fetchPkg))
-  // }
-}
-
-// async function fetchLocalPkg () {
-//   const pkgPath = await pkgUp()
-//   const pkgStr = await readFileAsync(pkgPath, 'utf8')
-//   try {
-//     const pkg = JSON.parse(pkgStr)
-//     normalizePackage(pkg)
-//     return pkg
-//   } catch (err) {
-//     err.message = `Failed to parse package.json: ${err.message}`
-//     throw err
-//   }
-// }
-
-// function findDeps (pkg, type) {
-//   return pkg[type] && typeof pkg[type] === 'object'
-//     ? Object.keys(pkg[type])
-//     : []
-// }
+function createRegistryClient () {
+  const opts = {
+    log: {
+      error () {},
+      http () {},
+      info () {},
+      silly () {},
+      verbose () {},
+      warn () {}
+    }
+  }
+  const client = new RegistryClient(opts)
+  client.getAsync = pify(client.get.bind(client))
+  return client
+}
+
+function isScopedPkg (pkgName) {
+  return pkgName.includes('/')
+}
+
+/**
+ * A few notes:
+ *   - bulk queries do not support scoped packages
+ *   - bulk queries are limited to at most 128 packages at a time
+ */
+async function bulkFetchDownloads (pkgNames) {
+  const downloads = {}
+
+  const normalPkgNames = pkgNames.filter(pkgName => !isScopedPkg(pkgName))
+  const scopedPkgNames = pkgNames.filter(isScopedPkg)
+
+  for (let start = 0; start < normalPkgNames.length; start += DOWNLOADS_URL_LIMIT) {
+    const pkgNamesSubset = normalPkgNames.slice(start, start + DOWNLOADS_URL_LIMIT)
+    const url = DOWNLOADS_URL + pkgNamesSubset.join(',')
+    const res = await got(url, { json: true })
+    Object.keys(res.body).forEach(pkgName => {
+      downloads[pkgName] = res.body[pkgName].downloads
+    })
+  }
+
+  await Promise.all(scopedPkgNames.map(async scopedPkgName => {
+    const url = DOWNLOADS_URL + scopedPkgName
+    const res = await got(url, { json: true })
+    downloads[scopedPkgName] = res.body.downloads
+  }))
+
+  return downloads
+}
+
+function computeAuthorInfos (pkgs, downloadCounts) {
+  // author name -> array of package names
+  const authorInfos = {}
+
+  pkgs.forEach(pkg => {
+    pkg.maintainers
+      .map(maintainer => maintainer.name)
+      .forEach(author => {
+        if (authorInfos[author] == null) authorInfos[author] = []
+        authorInfos[author].push(pkg.name)
+      })
+  })
+
+  // Sort each author's package list by download count
+  Object.keys(authorInfos).forEach(author => {
+    const pkgs = authorInfos[author]
+    pkgs.sort((pkg1, pkg2) => downloadCounts[pkg2] - downloadCounts[pkg1])
+  })
+
+  return authorInfos
+}
 
 function handleError (err) {
   console.error(`thanks: Error: ${err.message}`)
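
For reference, a small standalone sketch of the api.npmjs.org responses that the new bulkFetchDownloads() relies on. The response shapes below are inferred from what the diff reads back (res.body[pkgName].downloads for a bulk query, res.body.downloads for a single package); the package names are just examples, and got@8's { json: true } option is used as in cmd.js:

const got = require('got')

const DOWNLOADS_URL = 'https://api.npmjs.org/downloads/point/last-month/'

async function demo () {
  // Bulk query: comma-separated, unscoped names, at most 128 per request.
  // The parsed body is an object keyed by package name.
  const bulk = await got(DOWNLOADS_URL + 'minimist,pify', { json: true })
  console.log(bulk.body.minimist.downloads) // last-month download count for minimist

  // Single-package query: the parsed body has a top-level `downloads` field.
  // The diff uses this form for scoped packages, which the bulk endpoint does not accept.
  const single = await got(DOWNLOADS_URL + 'chalk', { json: true })
  console.log(single.body.downloads)
}

demo().catch(console.error)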

package.json (1 changed line)

@@ -13,6 +13,7 @@
   },
   "dependencies": {
     "chalk": "^2.3.0",
+    "got": "^8.0.3",
     "minimist": "^1.2.0",
     "npm-registry-client": "^8.5.0",
     "pify": "^3.0.0",
