Guillermo Rauch
8 years ago
commit
4c5a0a0050
91 changed files with 11568 additions and 0 deletions
@ -0,0 +1,9 @@ |
|||
[ignore] |
|||
|
|||
[include] |
|||
|
|||
[libs] |
|||
|
|||
[options] |
|||
|
|||
[lints] |
@ -0,0 +1,2 @@ |
|||
node_modules |
|||
out |
@ -0,0 +1,191 @@ |
|||
|
|||
Apache License |
|||
Version 2.0, January 2004 |
|||
https://www.apache.org/licenses/ |
|||
|
|||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION |
|||
|
|||
1. Definitions. |
|||
|
|||
"License" shall mean the terms and conditions for use, reproduction, |
|||
and distribution as defined by Sections 1 through 9 of this document. |
|||
|
|||
"Licensor" shall mean the copyright owner or entity authorized by |
|||
the copyright owner that is granting the License. |
|||
|
|||
"Legal Entity" shall mean the union of the acting entity and all |
|||
other entities that control, are controlled by, or are under common |
|||
control with that entity. For the purposes of this definition, |
|||
"control" means (i) the power, direct or indirect, to cause the |
|||
direction or management of such entity, whether by contract or |
|||
otherwise, or (ii) ownership of fifty percent (50%) or more of the |
|||
outstanding shares, or (iii) beneficial ownership of such entity. |
|||
|
|||
"You" (or "Your") shall mean an individual or Legal Entity |
|||
exercising permissions granted by this License. |
|||
|
|||
"Source" form shall mean the preferred form for making modifications, |
|||
including but not limited to software source code, documentation |
|||
source, and configuration files. |
|||
|
|||
"Object" form shall mean any form resulting from mechanical |
|||
transformation or translation of a Source form, including but |
|||
not limited to compiled object code, generated documentation, |
|||
and conversions to other media types. |
|||
|
|||
"Work" shall mean the work of authorship, whether in Source or |
|||
Object form, made available under the License, as indicated by a |
|||
copyright notice that is included in or attached to the work |
|||
(an example is provided in the Appendix below). |
|||
|
|||
"Derivative Works" shall mean any work, whether in Source or Object |
|||
form, that is based on (or derived from) the Work and for which the |
|||
editorial revisions, annotations, elaborations, or other modifications |
|||
represent, as a whole, an original work of authorship. For the purposes |
|||
of this License, Derivative Works shall not include works that remain |
|||
separable from, or merely link (or bind by name) to the interfaces of, |
|||
the Work and Derivative Works thereof. |
|||
|
|||
"Contribution" shall mean any work of authorship, including |
|||
the original version of the Work and any modifications or additions |
|||
to that Work or Derivative Works thereof, that is intentionally |
|||
submitted to Licensor for inclusion in the Work by the copyright owner |
|||
or by an individual or Legal Entity authorized to submit on behalf of |
|||
the copyright owner. For the purposes of this definition, "submitted" |
|||
means any form of electronic, verbal, or written communication sent |
|||
to the Licensor or its representatives, including but not limited to |
|||
communication on electronic mailing lists, source code control systems, |
|||
and issue tracking systems that are managed by, or on behalf of, the |
|||
Licensor for the purpose of discussing and improving the Work, but |
|||
excluding communication that is conspicuously marked or otherwise |
|||
designated in writing by the copyright owner as "Not a Contribution." |
|||
|
|||
"Contributor" shall mean Licensor and any individual or Legal Entity |
|||
on behalf of whom a Contribution has been received by Licensor and |
|||
subsequently incorporated within the Work. |
|||
|
|||
2. Grant of Copyright License. Subject to the terms and conditions of |
|||
this License, each Contributor hereby grants to You a perpetual, |
|||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable |
|||
copyright license to reproduce, prepare Derivative Works of, |
|||
publicly display, publicly perform, sublicense, and distribute the |
|||
Work and such Derivative Works in Source or Object form. |
|||
|
|||
3. Grant of Patent License. Subject to the terms and conditions of |
|||
this License, each Contributor hereby grants to You a perpetual, |
|||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable |
|||
(except as stated in this section) patent license to make, have made, |
|||
use, offer to sell, sell, import, and otherwise transfer the Work, |
|||
where such license applies only to those patent claims licensable |
|||
by such Contributor that are necessarily infringed by their |
|||
Contribution(s) alone or by combination of their Contribution(s) |
|||
with the Work to which such Contribution(s) was submitted. If You |
|||
institute patent litigation against any entity (including a |
|||
cross-claim or counterclaim in a lawsuit) alleging that the Work |
|||
or a Contribution incorporated within the Work constitutes direct |
|||
or contributory patent infringement, then any patent licenses |
|||
granted to You under this License for that Work shall terminate |
|||
as of the date such litigation is filed. |
|||
|
|||
4. Redistribution. You may reproduce and distribute copies of the |
|||
Work or Derivative Works thereof in any medium, with or without |
|||
modifications, and in Source or Object form, provided that You |
|||
meet the following conditions: |
|||
|
|||
(a) You must give any other recipients of the Work or |
|||
Derivative Works a copy of this License; and |
|||
|
|||
(b) You must cause any modified files to carry prominent notices |
|||
stating that You changed the files; and |
|||
|
|||
(c) You must retain, in the Source form of any Derivative Works |
|||
that You distribute, all copyright, patent, trademark, and |
|||
attribution notices from the Source form of the Work, |
|||
excluding those notices that do not pertain to any part of |
|||
the Derivative Works; and |
|||
|
|||
(d) If the Work includes a "NOTICE" text file as part of its |
|||
distribution, then any Derivative Works that You distribute must |
|||
include a readable copy of the attribution notices contained |
|||
within such NOTICE file, excluding those notices that do not |
|||
pertain to any part of the Derivative Works, in at least one |
|||
of the following places: within a NOTICE text file distributed |
|||
as part of the Derivative Works; within the Source form or |
|||
documentation, if provided along with the Derivative Works; or, |
|||
within a display generated by the Derivative Works, if and |
|||
wherever such third-party notices normally appear. The contents |
|||
of the NOTICE file are for informational purposes only and |
|||
do not modify the License. You may add Your own attribution |
|||
notices within Derivative Works that You distribute, alongside |
|||
or as an addendum to the NOTICE text from the Work, provided |
|||
that such additional attribution notices cannot be construed |
|||
as modifying the License. |
|||
|
|||
You may add Your own copyright statement to Your modifications and |
|||
may provide additional or different license terms and conditions |
|||
for use, reproduction, or distribution of Your modifications, or |
|||
for any such Derivative Works as a whole, provided Your use, |
|||
reproduction, and distribution of the Work otherwise complies with |
|||
the conditions stated in this License. |
|||
|
|||
5. Submission of Contributions. Unless You explicitly state otherwise, |
|||
any Contribution intentionally submitted for inclusion in the Work |
|||
by You to the Licensor shall be under the terms and conditions of |
|||
this License, without any additional terms or conditions. |
|||
Notwithstanding the above, nothing herein shall supersede or modify |
|||
the terms of any separate license agreement you may have executed |
|||
with Licensor regarding such Contributions. |
|||
|
|||
6. Trademarks. This License does not grant permission to use the trade |
|||
names, trademarks, service marks, or product names of the Licensor, |
|||
except as required for reasonable and customary use in describing the |
|||
origin of the Work and reproducing the content of the NOTICE file. |
|||
|
|||
7. Disclaimer of Warranty. Unless required by applicable law or |
|||
agreed to in writing, Licensor provides the Work (and each |
|||
Contributor provides its Contributions) on an "AS IS" BASIS, |
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
|||
implied, including, without limitation, any warranties or conditions |
|||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A |
|||
PARTICULAR PURPOSE. You are solely responsible for determining the |
|||
appropriateness of using or redistributing the Work and assume any |
|||
risks associated with Your exercise of permissions under this License. |
|||
|
|||
8. Limitation of Liability. In no event and under no legal theory, |
|||
whether in tort (including negligence), contract, or otherwise, |
|||
unless required by applicable law (such as deliberate and grossly |
|||
negligent acts) or agreed to in writing, shall any Contributor be |
|||
liable to You for damages, including any direct, indirect, special, |
|||
incidental, or consequential damages of any character arising as a |
|||
result of this License or out of the use or inability to use the |
|||
Work (including but not limited to damages for loss of goodwill, |
|||
work stoppage, computer failure or malfunction, or any and all |
|||
other commercial damages or losses), even if such Contributor |
|||
has been advised of the possibility of such damages. |
|||
|
|||
9. Accepting Warranty or Additional Liability. While redistributing |
|||
the Work or Derivative Works thereof, You may choose to offer, |
|||
and charge a fee for, acceptance of support, warranty, indemnity, |
|||
or other liability obligations and/or rights consistent with this |
|||
License. However, in accepting such obligations, You may act only |
|||
on Your own behalf and on Your sole responsibility, not on behalf |
|||
of any other Contributor, and only if You agree to indemnify, |
|||
defend, and hold each Contributor harmless for any liability |
|||
incurred by, or claims asserted against, such Contributor by reason |
|||
of your accepting any such warranty or additional liability. |
|||
|
|||
END OF TERMS AND CONDITIONS |
|||
|
|||
Copyright 2017 ZEIT, Inc. |
|||
|
|||
Licensed under the Apache License, Version 2.0 (the "License"); |
|||
you may not use this file except in compliance with the License. |
|||
You may obtain a copy of the License at |
|||
|
|||
https://www.apache.org/licenses/LICENSE-2.0 |
|||
|
|||
Unless required by applicable law or agreed to in writing, software |
|||
distributed under the License is distributed on an "AS IS" BASIS, |
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|||
See the License for the specific language governing permissions and |
|||
limitations under the License. |
@ -0,0 +1,297 @@ |
|||
|
|||
![now](https://github.com/zeit/art/blob/master/now-hosted/now-header.png?raw=true) |
|||
|
|||
## How it works |
|||
|
|||
Now enables instant immutable deployments to **any cloud provider** |
|||
with a simple API that's scalable, intuitive and optimized for collaboration. |
|||
|
|||
This is all it takes to deploy, for example, a Node.js project: |
|||
|
|||
``` |
|||
cd my-project |
|||
cat server.js |
|||
``` |
|||
|
|||
```js |
|||
require('http').createServer((req, res) => { |
|||
res.end('▲ Hello World') |
|||
}).listen(process.env.PORT) |
|||
``` |
|||
|
|||
and deploy! |
|||
|
|||
``` |
|||
now |
|||
``` |
|||
|
|||
The output of the `now` command will be a unique url to the deployment. No need for git. |
|||
|
|||
## Features |
|||
|
|||
- **Single command deployment**: `now`. |
|||
- **100% OSS** and licensed under Apache 2.0 |
|||
- **Serverless**. Worry about code, not servers. |
|||
- **Immutable**. Every time you write `now`, a new deployment is provisioned. |
|||
- **Pluggable**. Client can query any public and private cloud provider APIs |
|||
- **Flexible**. Interact with multiple clouds at once: `now gcp deploy && now aws deploy` |
|||
- **Single command setup**: `now [provider] login` |
|||
- **Secure**. All deployments are served over SSL |
|||
- **Dynamic and Static**. Deploy popular runtimes or static websites |
|||
- **Remote fs support**. Deploy any github project with `now project/repo`, gitlab with `gitlab://`. [PRs welcome](https://github.com/zeit/now/pulls)! |
|||
|
|||
## Installation |
|||
|
|||
To install the latest version: |
|||
|
|||
``` |
|||
npm install -g now@preview |
|||
``` |
|||
|
|||
Note: while the API has been in production for over a year, the different |
|||
providers are still under heavy development |
|||
|
|||
Optionally, you can clone this repo and run `npm run build` to |
|||
produce the [pkg](https://github.com/zeit/pkg) binaries. |
|||
|
|||
## Setup |
|||
|
|||
Configuration of one or more providers via their `login` commands is necessary. If no logins are active, running `now` will prompt you to log in first.
|||
|
|||
Global configuration is stored as `~/.now/config.json`. |
|||
|
|||
Your default provider will be the first one you log in to. If you are logged into multiple providers and want to set a different default, run:
|||
|
|||
``` |
|||
now config set provider gcp |
|||
``` |
|||
|
|||
### Now.sh |
|||
|
|||
``` |
|||
now login |
|||
``` |
|||
|
|||
To skip the configuration steps and deploy to `https://now.sh` |
|||
execute `now login` without any parameters, defaulting to the `sh` provider (equivalent to: `now sh login`). |
|||
|
|||
[Now.sh](https://zeit.co/now) is _**free** for open-source projects and static deployments_. It supports `Dockerfile`, `package.json` and static sites out of the box. All builds are reproducible and executed in the cloud. |
|||
|
|||
### AWS Lambda (`aws`) |
|||
|
|||
Run: |
|||
|
|||
``` |
|||
now aws login |
|||
``` |
|||
|
|||
If you have already run `aws configure` before, you will be offered |
|||
to synchronize your credentials. |
|||
|
|||
Serverless deployments are provisioned by using: |
|||
|
|||
- Lambda functions λ |
|||
- A proxy is automatically used to bridge the API between |
|||
HTTP and lambda functions and retain a consistent interface |
|||
- Certificate Manager |
|||
- API Gateway |
|||
|
|||
### Google Cloud Platform (`gcp`) |
|||
|
|||
``` |
|||
$ now gcp login |
|||
``` |
|||
|
|||
and follow the instructions! |
|||
|
|||
### Microsoft Azure (`az`) |
|||
|
|||
``` |
|||
$ now az login |
|||
``` |
|||
|
|||
and follow the instructions! |
|||
|
|||
## <span id="configuration">Project Configuration</span> |
|||
|
|||
<table> |
|||
<td>ℹ️</td><td>We welcome feedback from <a href="#community">the community</a>!</td> |
|||
</table> |
|||
|
|||
The v1 release of `now.json` includes the following specification: |
|||
|
|||
- `name` (optional, recommended) `String` |
|||
- `description` (optional, recommended) `String` |
|||
- `type` (optional, recommended). One of: |
|||
- `String` an unique identifier for the project type. The following |
|||
are recommended choices to be supported by every provider: |
|||
- `docker` |
|||
- `nodejs` |
|||
- `static` |
|||
- `Object` |
|||
when it's necessary to specify a version or multiple interacting runtimes. It's a dictionary of runtime identifier and [SemVer-compatible](https://semver.org/) version. For example:
|||
``` |
|||
{ "type": { "docker": "1.x.x" } } |
|||
``` |
|||
- `provider` (optional) indicates affinity to a certain provider |
|||
- `target` (optional) `String` |
|||
- specifies a directory or file to deploy. If relative, it's resolved |
|||
to the project directory. This is useful when a certain |
|||
deployment type (like `static`) has an output target, like an `out` |
|||
or `dist` directory. |
|||
- `env` (optional). One of |
|||
- `Object` a dictionary mapping the name of the environmental variable |
|||
to expose to the deployment and its value. |
|||
If the value begins with `@`, it's considered a reference to a secret.
|||
- `Array` a list of suggested environmental variables that the project |
|||
_might_ require to be deployed and function correctly |
|||
- `regions` - `Array` of `String` |
|||
- specifies one or more region identifiers to deploy to. A wildcard
|||
can be used to signify deployment to all supported regions by the |
|||
provider |
|||
- `files` - `Array` of `String` |
|||
- specifies a whitelist of what files have to be deployed |
|||
|
|||
To supply provider-specific configuration, you can include an arbitrary `Object` and use the provider identifier as the key. |
|||
|
|||
## <span id="global-config">Global Configuration</span> |
|||
|
|||
The client will initialize a `.now` directory in the user's home |
|||
directory upon first running. |
|||
|
|||
There, two files can be found: |
|||
|
|||
- `config.json` |
|||
- `credentials.json` |
|||
|
|||
## Implementation notes |
|||
|
|||
Now is directly modeled after UNIX. It's useful to think of the primary subcommands `deploy`, `alias` and `rm` as being the "cloud equivalents" of `cp`, `ln` and `rm`. |
|||
|
|||
The minimal set of commands that providers must supply are: |
|||
|
|||
<table> |
|||
<tr> |
|||
<td><code>[]</code> / <code>deploy</code></td> |
|||
<td>the default command to launch a deployment</td> |
|||
</tr> |
|||
<tr> |
|||
<td><code>remove</code> / <code>rm</code></td> |
|||
<td>remove a deployment identified by its unique URL</td> |
|||
</tr> |
|||
</table> |
|||
|
|||
Recommended, but not required, commands are: |
|||
|
|||
<table> |
|||
<tr> |
|||
<td><code>logs</code> | <code>ln</code></td> |
|||
<td>display the logs of a deployment</td>
|||
</tr> |
|||
<tr> |
|||
<td><code>secrets</code> <code>ls</code> <code>rm</code> <code>add</code></td> |
|||
<td>manage secrets</td>
|||
</tr> |
|||
<tr> |
|||
<td><code>domains</code> <code>ls</code> / <code>add</code> / <code>rm</code></td> |
|||
<td>manage domains</td> |
|||
</tr> |
|||
<tr> |
|||
<td><code>dns</code> <code>ls</code> / <code>add</code> / <code>rm</code></td> |
|||
<td>manage dns records</td> |
|||
</tr> |
|||
<tr> |
|||
<td><code>certs</code> <code>ls</code> / <code>add</code> / <code>rm</code></td> |
|||
<td>manage certificates</td> |
|||
</tr> |
|||
</table> |
|||
|
|||
The `build` step for serverless deployments is implemented locally and is compatible with projects configured with the `type`: |
|||
|
|||
- `nodejs` |
|||
- `go` |
|||
- `static` |
|||
|
|||
## Philosophy |
|||
|
|||
### Immutability |
|||
|
|||
Each time you write `now` a new deployment is provisioned. Whenever |
|||
possible, providers should strive to make deployments idempotent in the |
|||
absence of changes to: |
|||
|
|||
- Originating source code |
|||
- Configuration |
|||
- Environment variables |
|||
|
|||
### Standards compliance |
|||
|
|||
All projects expose a HTTP/1.1-compatible interface. A port is provided |
|||
via the standard `process.env.PORT`. |
|||
|
|||
### Secure |
|||
|
|||
Whenever possible, deployments are strongly encouraged to be served over SSL. The process of provisioning certificates should be transparent to the user. |
|||
|
|||
### Projects should require minimal JSON configuration |
|||
|
|||
Whenever possible, projects should be deployable with minimal or no configuration. |
|||
|
|||
### Avoid manifest duplication |
|||
|
|||
If the configuration or conventions imposed by a programming language |
|||
or framework are present, attempt to provide sane defaults. |
|||
|
|||
Examples of this is the presence of `Dockerfile` or `package.json`. When |
|||
publishing a project it's recommended that the [`type`](#type) is strictly |
|||
configured in [`now.json`](#now-json) to avoid ambiguity.
|||
|
|||
## Contributions and Roadmap |
|||
|
|||
#### Community |
|||
|
|||
All feedback and suggestions are welcome! |
|||
|
|||
- 💬 Chat: Join us on [zeit.chat](https://zeit.chat) `#now-client`. |
|||
- 📣 Stay up to date on new features and announcements on [@zeithq](https://twitter.com/zeithq).
|||
- 🔐 Subscribe to our [security](http://zeit.us12.list-manage1.com/subscribe?u=3c9e9e13d7e6dae8faf375bed&id=110e586914) mailing list to stay up-to-date on urgent security disclosures. |
|||
|
|||
Please note: we adhere to the [contributor covenant](http://contributor-covenant.org/) for
|||
all interactions in our community. |
|||
|
|||
#### Contributions |
|||
|
|||
To get started contributing, make sure you're running `node` `8.x.x`. Clone this repository: |
|||
|
|||
``` |
|||
git clone https://github.com/zeit/now |
|||
``` |
|||
|
|||
To test the [`pkg`](https://github.com/zeit/pkg) binary distribution, run: |
|||
|
|||
``` |
|||
npm run build |
|||
``` |
|||
|
|||
#### Ongoing development |
|||
|
|||
- Support for `now <file>`, with support for: |
|||
- Binaries as a first-class deployment type |
|||
- Static deployments as a fallback |
|||
- We are working on built-in support for provisioning [Kubernetes](https://kubernetes.io/) |
|||
replication controllers and pods, in a similar vein as the [Draft](https://github.com/azure/draft) project. |
|||
- A simple API to register custom providers and pluggable build systems externally, such as Travis, Circle CI, etc. |
|||
- A companion desktop app [Now Desktop](https://github.com/zeit/now-desktop) |
|||
is available, released under the MIT license. |
|||
Work is ongoing for pluggable providers to enable: |
|||
- Team collaboration |
|||
- One-click context switch |
|||
- Drag and drop deployments |
|||
- Adding interoperability between objects that live in different providers
|||
- Providing a Next.js and React powered dashboard that can be deployed anywhere |
|||
|
|||
## License |
|||
|
|||
Now is licensed under the Apache License, Version 2.0. |
|||
See LICENSE for the full license text. |
|||
|
@ -0,0 +1,5 @@ |
|||
// Minimal HTTP server example: replies "Hello world!" to every request.
// The listening port is supplied by the platform via process.env.PORT.
const server = require('http').createServer((req, res) => {
  res.end('Hello world!')
})

server.listen(process.env.PORT)
@ -0,0 +1,9 @@ |
|||
{ |
|||
"name": "micro-example", |
|||
"dependencies": { |
|||
"micro": "latest" |
|||
}, |
|||
"scripts": { |
|||
"start": "micro server" |
|||
} |
|||
} |
@ -0,0 +1,8 @@ |
|||
module.exports = () => { |
|||
return { |
|||
project: { |
|||
type: 'microservice', |
|||
poweredBy: '▲' |
|||
} |
|||
} |
|||
} |
@ -0,0 +1,17 @@ |
|||
<!DOCTYPE html> |
|||
<html lang="en"> |
|||
<head> |
|||
<title>My first now deployment</title> |
|||
<meta charset="utf-8" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1"> |
|||
<link rel="stylesheet" href="style.css"> |
|||
</head> |
|||
<body> |
|||
<div> |
|||
<p style="font: 50px Helvetica"> |
|||
▲ |
|||
</p> |
|||
</div> |
|||
</body> |
|||
</html> |
|||
|
@ -0,0 +1,21 @@ |
|||
body { |
|||
background: #000; |
|||
color: #fff; |
|||
} |
|||
|
|||
div { |
|||
display: flex; |
|||
align-items: center; |
|||
height: 100%; |
|||
width: 100%; |
|||
position: absolute; |
|||
} |
|||
|
|||
p { |
|||
font-size: 200px; |
|||
margin: 0; |
|||
padding: 0; |
|||
width: 100%; |
|||
text-align: center; |
|||
} |
|||
|
@ -0,0 +1,112 @@ |
|||
{ |
|||
"name": "now", |
|||
"version": "8.0.0-beta.1", |
|||
"dependencies": { |
|||
"ansi-escapes": "^2.0.0", |
|||
"archiver": "^2.0.0", |
|||
"array-unique": "^0.3.2", |
|||
"async-retry": "^1.1.3", |
|||
"aws-sdk": "^2.82.0", |
|||
"bytes": "^2.5.0", |
|||
"chalk": "^2.0.1", |
|||
"clipboardy": "^1.1.4", |
|||
"convert-stream": "^1.0.2", |
|||
"debug": "^2.6.8", |
|||
"deployment-type": "^1.0.1", |
|||
"docker-file-parser": "^1.0.2", |
|||
"dotenv": "^4.0.0", |
|||
"download": "^6.2.5", |
|||
"email-prompt": "^0.3.1", |
|||
"email-validator": "^1.0.7", |
|||
"fs-extra": "^3.0.1", |
|||
"fs.promised": "^3.0.0", |
|||
"glob": "^7.1.2", |
|||
"ignore": "^3.3.3", |
|||
"ini": "^1.3.4", |
|||
"inquirer": "^3.2.0", |
|||
"is-url": "^1.2.2", |
|||
"minimist": "^1.2.0", |
|||
"mkdirp-promise": "^5.0.1", |
|||
"ms": "^2.0.0", |
|||
"node-fetch": "^1.7.1", |
|||
"opn": "^5.1.0", |
|||
"ora": "^1.3.0", |
|||
"pipe-streams-to-promise": "^0.2.0", |
|||
"resumer": "^0.0.0", |
|||
"socket.io-client": "^2.0.3", |
|||
"split-array": "^1.0.1", |
|||
"strip-ansi": "^4.0.0", |
|||
"tar-fs": "^1.15.3", |
|||
"then-sleep": "^1.0.1", |
|||
"tmp-promise": "^1.0.3", |
|||
"uid-promise": "^1.0.0" |
|||
}, |
|||
"main": "./out/now.js", |
|||
"files": [ |
|||
"out" |
|||
], |
|||
"bin": { |
|||
"now": "./out/now.js" |
|||
}, |
|||
"devDependencies": { |
|||
"ava": "^0.20.0", |
|||
"babel-cli": "^6.24.1", |
|||
"babel-eslint": "^7.2.3", |
|||
"babel-preset-flow": "^6.23.0", |
|||
"babel-register": "^6.24.1", |
|||
"eslint": "^4.1.1", |
|||
"flow-bin": "^0.49.1", |
|||
"flow-remove-types": "^1.2.1", |
|||
"lint-staged": "^4.0.1", |
|||
"pre-commit": "^1.2.2", |
|||
"prettier": "^1.5.2" |
|||
}, |
|||
"scripts": { |
|||
"build": "babel src/ -d out/ && chmod +x out/now.js", |
|||
"test": "eslint . && flow", |
|||
"prepublish": "yarn run test && yarn run build", |
|||
"lint:staged": "lint-staged", |
|||
"dev": "yarn run build && ./out/now.js" |
|||
}, |
|||
"pre-commit": "lint:staged", |
|||
"lint-staged": { |
|||
"*.js": [ |
|||
"eslint", |
|||
"prettier --write --single-quote --no-semi", |
|||
"git add" |
|||
] |
|||
}, |
|||
"eslintConfig": { |
|||
"parserOptions": { |
|||
"ecmaVersion": 8, |
|||
"sourceType": "script" |
|||
}, |
|||
"parser": "babel-eslint", |
|||
"extends": [ |
|||
"eslint:recommended" |
|||
], |
|||
"env": { |
|||
"es6": true, |
|||
"node": true |
|||
}, |
|||
"rules": { |
|||
"func-names": [ |
|||
"error", |
|||
"as-needed" |
|||
], |
|||
"no-console": 0, |
|||
"no-shadow": "error", |
|||
"no-extra-semi": 0 |
|||
} |
|||
}, |
|||
"babel": { |
|||
"presets": [ |
|||
"flow" |
|||
] |
|||
}, |
|||
"ava": { |
|||
"require": [ |
|||
"babel-register" |
|||
] |
|||
} |
|||
} |
@ -0,0 +1,107 @@ |
|||
const { join, basename } = require('path') |
|||
const { exists, stat, readFile } = require('fs.promised') |
|||
|
|||
/**
 * Inspect a project directory and infer its deployment metadata.
 *
 * Reads `now.json` and `package.json` (when present) to determine the
 * project's `name`, `description` and deployment `type` (`nodejs`, `go`,
 * `static`, or whatever `now.json` declares explicitly).
 *
 * @param {String} path - path to the project directory
 * @returns {Object} project description: `{ type }` plus, depending on the
 *   detected type, `name`, `description`, `nowJSON`, `packageJSON` and
 *   `hasServerJSFile`
 * @throws {Error} if `path` points to a file rather than a directory, or if
 *   both `package.json` (with a `now` field) and `now.json` are present
 *   (err.code === 'AMBIGOUS_CONFIG', err.files lists the offenders)
 */
const describeProject = async path => {
  let nowJSON = null
  let packageJSON = null

  const s = await stat(path)
  if (s.isFile()) {
    throw new Error(
      'Deploying files directly is coming! Please supply a directory'
    )
  }

  const nowJSONPath = join(path, 'now.json')

  if (await exists(nowJSONPath)) {
    nowJSON = JSON.parse(await readFile(nowJSONPath))
  }

  const packageJSONPath = join(path, 'package.json')

  if (await exists(packageJSONPath)) {
    packageJSON = JSON.parse(await readFile(packageJSONPath))
  }

  if (packageJSON && packageJSON.now && nowJSON) {
    // fixed typo in the user-facing message ("Ambigous" -> "Ambiguous");
    // the error `code` is intentionally left unchanged so existing callers
    // matching on it keep working
    const err = new Error(
      'Ambiguous config: package.json (with `now` field) and now.json'
    )
    err.code = 'AMBIGOUS_CONFIG'
    err.files = ['package.json', 'now.json']
    throw err
  }

  // normalize legacy type aliases in-place, warning the user
  if (nowJSON && (nowJSON.type === 'npm' || nowJSON.type === 'node')) {
    console.log(
      'DEPRECATED: `npm` and `node` types should be `nodejs` in `now.json`'
    )
    nowJSON.type = 'nodejs'
  }

  // npm has a convention that `npm start`, if not defined,
  // will invoke `node server.js`
  const hasServerJSFile = await exists(join(path, 'server.js'))

  // we support explicit definition of nodejs as a type, or we
  // guess it based on the presence of `package.json` or `server.js`
  if (
    (nowJSON && nowJSON.type === 'nodejs') ||
    ((!nowJSON || !nowJSON.type) && (packageJSON || hasServerJSFile))
  ) {
    return {
      name: getName(path, nowJSON, packageJSON),
      description: getDescription(nowJSON, packageJSON),
      type: 'nodejs',
      nowJSON,
      packageJSON,
      hasServerJSFile
    }
  }

  // an explicit type in now.json wins over any further guessing
  if (nowJSON && nowJSON.type) {
    return {
      name: getName(path, nowJSON),
      description: getDescription(nowJSON),
      type: nowJSON.type,
      nowJSON
    }
  }

  // a `main.go` entry point marks a Go project
  if (await exists(join(path, 'main.go'))) {
    return {
      name: getName(path, nowJSON),
      description: getDescription(nowJSON),
      type: 'go'
    }
  }

  // fallback: treat the directory as a static deployment
  return {
    type: 'static'
  }
}
|||
|
|||
/**
 * Derive the deployment name for a project.
 *
 * Precedence: `now.json` name, then `package.json` name, then a slug built
 * from the directory basename. The result is always lowercase.
 *
 * @param {String} path - project directory (used only for the fallback)
 * @param {?Object} nowJSON - parsed now.json, if any
 * @param {?Object} packageJSON - parsed package.json, if any
 * @returns {String} the lowercase deployment name
 */
const getName = (path, nowJSON = null, packageJSON = null) => {
  // an explicitly configured name wins; now.json takes precedence
  for (const config of [nowJSON, packageJSON]) {
    if (config && config.name != null) {
      return config.name.toLowerCase()
    }
  }

  // fall back to a slug of the directory name: runs of non-word
  // characters collapse to a single dash
  const slug = basename(path).replace(/[^\w]+/g, '-')
  return slug.toLowerCase()
}
|||
|
|||
/**
 * Derive the deployment description for a project.
 *
 * Precedence: `now.json` description, then `package.json` description.
 *
 * @param {?Object} nowJSON - parsed now.json, if any
 * @param {?Object} packageJSON - parsed package.json, if any
 * @returns {?String} the description, or null when neither config has one
 */
const getDescription = (nowJSON = null, packageJSON = null) => {
  if (nowJSON && nowJSON.description != null) {
    return nowJSON.description
  }

  // bugfix: this previously tested `packageJSON.name != null`, so a
  // package.json without a `name` never contributed its description, and
  // one with a name but no description leaked `undefined` instead of null
  if (packageJSON && packageJSON.description != null) {
    return packageJSON.description
  }

  return null
}
|||
|
|||
module.exports = describeProject |
@ -0,0 +1,7 @@ |
|||
/**
 * Build the template for a fresh ~/.now/credentials.json.
 *
 * @returns {Object} `_` human-readable warning marker, `credentials` an
 *   empty list ready to receive per-provider credentials
 */
const getDefaultAuthCfg = () => {
  return {
    _:
      'This is your now credentials file. DONT SHARE! More: https://git.io/now-global-config',
    credentials: []
  }
}
|||
|
|||
module.exports = getDefaultAuthCfg |
@ -0,0 +1,6 @@ |
|||
/**
 * Build the template for a fresh ~/.now/config.json.
 *
 * bugfix: the `_` marker previously read "credentials file" — a copy/paste
 * from the auth-config template — which mislabeled the general config file.
 *
 * @returns {Object} `_` human-readable marker describing the file
 */
const getDefaultCfg = () => ({
  _:
    'This is your now config file. DONT SHARE! More: https://git.io/now-global-config'
})
|||
|
|||
module.exports = getDefaultCfg |
@ -0,0 +1,46 @@ |
|||
const cmd = require('./util/output/cmd') |
|||
const li = require('./util/output/list-item') |
|||
const link = require('./util/output/link') |
|||
const { gray, bold } = require('chalk') |
|||
|
|||
// prettier-ignore
// NOTE(review): the pragma originally read `prettier-disable`, which
// prettier does not recognize — `prettier-ignore` is the documented pragma.

/**
 * Build the general help screen shown by `now help`.
 *
 * @param {String} currentProvider - identifier of the current default
 *   provider; its row in the listing gets a gray "(default)" marker
 * @param {Object} providers - map of provider identifier -> provider module;
 *   each module exposes a `title` string used in the listing
 * @returns {String} the fully formatted (ANSI-colored) help text
 */
const getHelp = (currentProvider, providers) =>
  `
  ${bold('Now')}: universal serverless deployments.

  To deploy, run in any directory of your choosing:

    ${cmd('now')}

  The deployment backend provider is fully configurable.
  The following are supported:

  ${Object.keys(providers)
    .map(name =>
      li(
        `${bold(name)}\t ${providers[name]
          .title}\t\t\t\t\t${currentProvider === name
          ? gray('(default)')
          : ' '}`
      )
    )
    .join('\n  ')}

  For example, to setup AWS Lambda functions run:

    ${cmd('now aws login')}

  Some useful subcommands:

    ${li(cmd('now ls'))}
    ${li(cmd('now rm'))}
    ${li(cmd('now alias'))}

  To read more in-depth documentation, run:

    ${cmd('now [provider] [subcommand] help')}

  For more information: ${link('https://github.com/zeit/now')}.
`
|||
|
|||
module.exports = getHelp |
@ -0,0 +1,8 @@ |
|||
const { homedir } = require('os') |
|||
const { join } = require('path') |
|||
|
|||
/**
 * Resolve the directory holding now's global configuration files,
 * defaulting to `~/.now` and overridable via the NOW_HOME environment
 * variable.
 *
 * @returns {String} absolute path of the now config directory
 */
const getNowDir = () => process.env.NOW_HOME || join(homedir(), '.now')
|||
|
|||
module.exports = getNowDir |
@ -0,0 +1,40 @@ |
|||
const cmd = require('./util/output/cmd') |
|||
const li = require('./util/output/list-item') |
|||
const link = require('./util/output/link') |
|||
const { gray, bold } = require('chalk') |
|||
|
|||
// prettier-ignore
// NOTE(review): the pragma originally read `prettier-disable`, which
// prettier does not recognize — `prettier-ignore` is the documented pragma.

/**
 * Build the first-run welcome screen shown when no provider is configured.
 *
 * @param {String} currentProvider - identifier of the current default
 *   provider; its row in the listing gets a gray "(default)" marker
 * @param {Object} providers - map of provider identifier -> provider module;
 *   each module exposes a `title` string used in the listing
 * @returns {String} the fully formatted (ANSI-colored) welcome text
 */
const getWelcome = (currentProvider, providers) =>
  `
  Welcome to ${bold('Now')}!

  Our tool makes serverless deployment universal and instant,
  with just one command: ${cmd('now')}.

  To setup deployments with ${link('https://now.sh')} run:

    ${cmd('now login')}

  The following providers are also supported

  ${Object.keys(providers)
    .map(name =>
      li(
        `${bold(name)}\t ${providers[name]
          .title}\t\t\t\t\t${currentProvider === name
          ? gray('(default)')
          : ' '}`
      )
    )
    .join('\n  ')}

  To set up AWS, for example, run ${cmd('now aws login')}.
  Many can be configured simultaneously!

  Hope you enjoy Now! Check out these other resources:

    ${li(`Run ${cmd('now help')} for more info and examples`)}
    ${li(link('https://github.com/zeit/now'))}
`
|||
|
|||
module.exports = getWelcome |
@ -0,0 +1,397 @@ |
|||
#!/usr/bin/env node
|
|||
//@flow
|
|||
const start = Date.now() |
|||
|
|||
// theirs
|
|||
const debug = require('debug')('now:main') |
|||
const { exists } = require('fs.promised') |
|||
const { join } = require('path') |
|||
const mkdirp = require('mkdirp-promise') |
|||
const minimist = require('minimist') |
|||
|
|||
// ours
|
|||
const error = require('./util/output/error') |
|||
const effect = require('./util/output/effect') |
|||
const param = require('./util/output/param') |
|||
const getHelp = require('./get-help') |
|||
const getWelcome = require('./get-welcome') |
|||
const getNowDir = require('./get-now-dir') |
|||
const getDefaultCfg = require('./get-default-cfg') |
|||
const getDefaultAuthCfg = require('./get-default-auth-cfg') |
|||
const hp = require('./util/humanize-path') |
|||
const providers = require('./providers') |
|||
const configFiles = require('./util/config-files') |
|||
|
|||
const NOW_DIR = getNowDir() |
|||
const NOW_CONFIG_PATH = configFiles.getConfigFilePath() |
|||
const NOW_AUTH_CONFIG_PATH = configFiles.getAuthConfigFilePath() |
|||
|
|||
const GLOBAL_COMMANDS = new Set(['help']) |
|||
|
|||
// Terminate the process with the given exit code, logging total CLI
// runtime (measured from the module-level `start` timestamp) first.
const exit = exitCode => {
  debug('finished in', Date.now() - start)
  process.exit(exitCode)
}
|||
|
|||
const main = async argv_ => { |
|||
const argv = minimist(argv_, { |
|||
boolean: ['help', 'version'], |
|||
alias: { |
|||
help: 'h', |
|||
version: 'v' |
|||
} |
|||
}) |
|||
|
|||
// the second argument to the command can be a path
|
|||
// (as in: `now path/`) or a subcommand / provider
|
|||
// (as in: `now ls` or `now aws help`)
|
|||
let targetOrSubcommand: ?string = argv._[2] |
|||
|
|||
// we want to handle version or help directly only
|
|||
if (!targetOrSubcommand) { |
|||
if (argv.version) { |
|||
console.log(require('../package').version) |
|||
return 0 |
|||
} |
|||
} |
|||
|
|||
let nowDirExists |
|||
|
|||
try { |
|||
nowDirExists = await exists(NOW_DIR) |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to find the ' + |
|||
'now global directory: ' + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
if (!nowDirExists) { |
|||
try { |
|||
await mkdirp(NOW_DIR) |
|||
} catch (err) { |
|||
error( |
|||
'An unexpected error occurred while trying to create the ' + |
|||
`now global directory "${hp(NOW_DIR)}" ` + |
|||
err.message |
|||
) |
|||
} |
|||
} |
|||
|
|||
let initConfig = false |
|||
let initAuthConfig = false |
|||
let configExists |
|||
|
|||
try { |
|||
configExists = await exists(NOW_CONFIG_PATH) |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to find the ' + |
|||
`now config file "${hp(NOW_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
let config |
|||
|
|||
if (configExists) { |
|||
try { |
|||
config = configFiles.readConfigFile() |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to read the ' + |
|||
`now config in "${hp(NOW_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
try { |
|||
config = JSON.parse(config) |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
`An error occurred while trying to parse "${hp(NOW_CONFIG_PATH)}": ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} else { |
|||
config = getDefaultCfg() |
|||
try { |
|||
configFiles.writeToConfigFile(config) |
|||
initConfig = true |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to write the ' + |
|||
`default now config to "${hp(NOW_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} |
|||
|
|||
let authConfigExists |
|||
|
|||
try { |
|||
authConfigExists = await exists(NOW_AUTH_CONFIG_PATH) |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to find the ' + |
|||
`now auth file "${hp(NOW_AUTH_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
let authConfig = null |
|||
|
|||
if (authConfigExists) { |
|||
try { |
|||
authConfig = configFiles.readAuthConfigFile() |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to read the ' + |
|||
`now auth config in "${hp(NOW_AUTH_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
try { |
|||
authConfig = JSON.parse(authConfig) |
|||
|
|||
if (!Array.isArray(authConfig.credentials)) { |
|||
console.error( |
|||
error( |
|||
`The content of "${hp(NOW_AUTH_CONFIG_PATH)}" is invalid. ` + |
|||
'No `credentials` list found inside' |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
for (const [i, { provider }] of authConfig.credentials.entries()) { |
|||
if (null == provider) { |
|||
console.error( |
|||
error( |
|||
`Invalid credential found in "${hp(NOW_AUTH_CONFIG_PATH)}". ` + |
|||
`Missing \`provider\` key in entry with index ${i}` |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
if (!(provider in providers)) { |
|||
console.error( |
|||
error( |
|||
`Invalid credential found in "${hp(NOW_AUTH_CONFIG_PATH)}". ` + |
|||
`Unknown provider "${provider}"` |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
`An error occurred while trying to parse "${hp( |
|||
NOW_AUTH_CONFIG_PATH |
|||
)}": ` + err.message
|
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} else { |
|||
authConfig = getDefaultAuthCfg() |
|||
try { |
|||
configFiles.writeToAuthConfigFile(authConfig) |
|||
initAuthConfig = true |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
'An unexpected error occurred while trying to write the ' + |
|||
`default now config to "${hp(NOW_CONFIG_PATH)}" ` + |
|||
err.message |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} |
|||
|
|||
if (initConfig || initAuthConfig) { |
|||
console.log( |
|||
effect( |
|||
`Initialized default config in "${initConfig && initAuthConfig |
|||
? hp(NOW_DIR) |
|||
: hp(initConfig ? NOW_CONFIG_PATH : NOW_AUTH_CONFIG_PATH)}"` |
|||
) |
|||
) |
|||
} |
|||
|
|||
let suppliedProvider = null |
|||
|
|||
// if the target is something like `aws`
|
|||
if (targetOrSubcommand in providers) { |
|||
debug('user supplied a known provider') |
|||
const targetPath = join(process.cwd(), targetOrSubcommand) |
|||
const targetPathExists = await exists(targetPath) |
|||
|
|||
if (targetPathExists) { |
|||
console.error( |
|||
error( |
|||
`The supplied argument ${param(targetOrSubcommand)} is ambiguous. ` + |
|||
'Both a directory and a provider are known' |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
suppliedProvider = targetOrSubcommand |
|||
targetOrSubcommand = argv._[3] |
|||
} |
|||
|
|||
let { defaultProvider = null }: { defaultProvider: ?string } = config |
|||
|
|||
if (null === suppliedProvider) { |
|||
if (null === defaultProvider) { |
|||
// the first provider the user ever logged in to is
|
|||
// the default provider
|
|||
if (authConfig && authConfig.credentials.length) { |
|||
debug('using first credential as default provider') |
|||
defaultProvider = authConfig.credentials[0].provider |
|||
} else { |
|||
debug(`fallbacking to default now provider 'sh'`) |
|||
defaultProvider = 'sh' |
|||
} |
|||
} else { |
|||
debug('using provider supplied by user', defaultProvider) |
|||
if (!(defaultProvider in providers)) { |
|||
console.error( |
|||
error( |
|||
`The \`defaultProvider\` "${defaultProvider}" supplied in ` + |
|||
`"${NOW_CONFIG_PATH}" is not a valid provider` |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
} |
|||
} |
|||
|
|||
// we special case help because it's a generic command with
|
|||
// information about all providers
|
|||
if (!suppliedProvider && argv.help) { |
|||
console.log(getHelp(defaultProvider, providers)) |
|||
return 0 |
|||
} |
|||
|
|||
const provider: Object = providers[suppliedProvider || defaultProvider] |
|||
|
|||
// the context object to supply to the providers
|
|||
const ctx = { |
|||
config, |
|||
authConfig, |
|||
argv: argv_ |
|||
} |
|||
|
|||
let subcommand |
|||
|
|||
// we check if we are deploying something
|
|||
if (targetOrSubcommand) { |
|||
const targetPath = join(process.cwd(), targetOrSubcommand) |
|||
const targetPathExists = await exists(targetPath) |
|||
|
|||
const subcommandExists = |
|||
GLOBAL_COMMANDS.has(targetOrSubcommand) || |
|||
provider.subcommands.has(targetOrSubcommand) |
|||
|
|||
if (targetPathExists && subcommandExists) { |
|||
console.error( |
|||
error( |
|||
`The supplied argument ${param(targetOrSubcommand)} is ambiguous. ` + |
|||
'Both a directory and a subcommand are known' |
|||
) |
|||
) |
|||
return 1 |
|||
} |
|||
|
|||
if (subcommandExists) { |
|||
debug('user supplied known subcommand', targetOrSubcommand) |
|||
subcommand = targetOrSubcommand |
|||
} else { |
|||
debug('user supplied a possible target for deployment') |
|||
// our default command is deployment
|
|||
// at this point we're
|
|||
subcommand = 'deploy' |
|||
} |
|||
} else { |
|||
debug('user supplied no target, defaulting to deploy') |
|||
subcommand = 'deploy' |
|||
} |
|||
|
|||
if (subcommand === 'deploy' && !authConfig.credentials.length) { |
|||
debug('subcommand is deploy, but user has no credentials') |
|||
console.log(getWelcome(provider, providers)) |
|||
return 0 |
|||
} |
|||
|
|||
if (subcommand === 'help') { |
|||
console.log(getHelp(defaultProvider, providers)) |
|||
return 0 |
|||
} |
|||
|
|||
try { |
|||
return provider[subcommand](ctx) |
|||
} catch (err) { |
|||
console.error( |
|||
error( |
|||
`An unexpected error occurred in provider ${subcommand}: ${err.stack}` |
|||
) |
|||
) |
|||
} |
|||
} |
|||
|
|||
debug('start') |
|||
|
|||
// Top-level handler for unhandled promise rejections. Routes real Error
// instances through handleUnexpected; anything else (non-Error values or
// an empty rejection) is reported directly. Always yields exit code 1.
const handleRejection = reason => {
  debug('handling rejection')
  if (!reason) {
    console.error(error('An unexpected empty rejection occurred'))
  } else if (reason instanceof Error) {
    handleUnexpected(reason)
  } else {
    console.error(error(`An unexpected rejection occurred\n  ${reason}`))
  }
  return 1
}
|||
|
|||
// Top-level handler for unexpected errors (uncaught exceptions and Error
// rejections). Prints the stack trace and yields exit code 1.
const handleUnexpected = err => {
  debug('handling unexpected error')
  // fix: the stack trace was previously interpolated twice
  // (`${err.stack} ${err.stack}`), printing it two times per error
  console.error(error(`An unexpected error occurred!\n  ${err.stack}`))
  return 1
}
|||
|
|||
process.on('uncaughtRejection', handleRejection) |
|||
process.on('uncaughtException', handleUnexpected) |
|||
|
|||
main(process.argv).then((code: number) => exit(code)).catch(handleUnexpected) |
@ -0,0 +1,362 @@ |
|||
// @flow
|
|||
|
|||
// theirs
|
|||
const ms = require('ms') |
|||
const minimist = require('minimist') |
|||
const { gray, bold } = require('chalk') |
|||
const bytes = require('bytes') |
|||
const uid = require('uid-promise') |
|||
const debug = require('debug')('now:aws:deploy') |
|||
|
|||
// ours
|
|||
const resolve = require('../../resolve') |
|||
const ok = require('../../util/output/ok') |
|||
const wait = require('../../util/output/wait') |
|||
const info = require('../../util/output/info') |
|||
const error = require('../../util/output/error') |
|||
const link = require('../../util/output/link') |
|||
const success = require('../../util/output/success') |
|||
const param = require('../../util/output/param') |
|||
const humanPath = require('../../util/humanize-path') |
|||
const build = require('../../serverless/build') |
|||
const getLambdaHandler = require('./get-lambda-handler') |
|||
const getAWS = require('./get-aws') |
|||
const describeProject = require('../../describe-project') |
|||
const copyToClipboard = require('../../util/copy-to-clipboard') |
|||
|
|||
const NOW_DEFAULT_IAM_ROLE = 'now-default-role' |
|||
const IAM_POLICY_DOCUMENT = { |
|||
Version: '2012-10-17', |
|||
Statement: [ |
|||
{ |
|||
Sid: '', |
|||
Effect: 'Allow', |
|||
Principal: { |
|||
Service: 'lambda.amazonaws.com' |
|||
}, |
|||
Action: 'sts:AssumeRole' |
|||
} |
|||
] |
|||
} |
|||
|
|||
// Deploy the target project to AWS: bundle it into a zip, create (or reuse)
// the default IAM role, upload a Lambda function, and wire an API Gateway
// REST API (root + `{proxy+}` catch-all) in front of it.
//
// ctx fields used: `config` (clipboard preference), `authConfig` (stored AWS
// credentials), `argv` (raw CLI args). Returns a process exit code.
const deploy = async ({ config, authConfig, argv: argv_ }) => {
  const argv = minimist(argv_, {
    boolean: ['help'],
    alias: {
      help: 'h'
    }
  })

  // `now [provider] [deploy] [target]`
  const [cmdOrTarget = null, target_ = null] = argv._.slice(2).slice(-2)

  let target

  if (cmdOrTarget === 'aws' || cmdOrTarget === 'deploy') {
    target = target_ === null ? process.cwd() : target_
  } else {
    if (target_) {
      console.error(error('Unexpected number of arguments for deploy command'))
      return 1
    } else {
      target = cmdOrTarget === null ? process.cwd() : cmdOrTarget
    }
  }

  const start = Date.now()
  const resolved = await resolve(target)

  if (resolved === null) {
    console.error(error(`Could not resolve deployment target ${param(target)}`))
    return 1
  }

  let desc = null

  try {
    desc = await describeProject(resolved)
  } catch (err) {
    // NOTE(review): 'AMBIGOUS_CONFIG' (sic) must match the code thrown by
    // describeProject — do not "fix" the spelling here alone
    if (err.code === 'AMBIGOUS_CONFIG') {
      console.error(
        error(`There is more than one source of \`now\` config: ${err.files}`)
      )
      return 1
    } else {
      throw err
    }
  }

  // a set of files that we personalize for this build
  const overrides = {
    '__now_handler.js': getLambdaHandler(desc)
  }

  // initialize aws client
  const aws = getAWS(authConfig)
  const region = aws.config.region || 'us-west-1'

  console.log(
    info(
      `Deploying ${param(humanPath(resolved))} ${gray('(aws)')} ${gray(
        `(${region})`
      )}`
    )
  )
  const buildStart = Date.now()
  const stopBuildSpinner = wait('Building and bundling your app…')
  const zipFile = await build(resolved, desc, { overrides })
  stopBuildSpinner()

  // lambda limits to 50mb
  if (zipFile.length > 50 * 1024 * 1024) {
    console.error(error('The build exceeds the 50mb AWS Lambda limit'))
    return 1
  }

  console.log(
    ok(
      `Build generated a ${bold(bytes(zipFile.length))} zip ${gray(
        `[${ms(Date.now() - buildStart)}]`
      )}`
    )
  )

  const iam = new aws.IAM({ apiVersion: '2010-05-08' })

  const gateway = new aws.APIGateway({
    apiVersion: '2015-07-09',
    region
  })

  const lambda = new aws.Lambda({
    apiVersion: '2015-03-31',
    region
  })

  let role

  try {
    role = await getRole(iam, { RoleName: NOW_DEFAULT_IAM_ROLE })
  } catch (err) {
    if ('NoSuchEntity' === err.code) {
      // first deployment ever: provision the role Lambda will assume
      const iamStart = Date.now()
      role = await createRole(iam, {
        AssumeRolePolicyDocument: JSON.stringify(IAM_POLICY_DOCUMENT),
        RoleName: NOW_DEFAULT_IAM_ROLE
      })
      console.log(
        ok(
          `Initialized IAM role ${param(NOW_DEFAULT_IAM_ROLE)} ${gray(
            // fix: elapsed time was computed backwards as
            // `iamStart - Date.now()`, producing a negative duration
            `[${ms(Date.now() - iamStart)}]`
          )}`
        )
      )
    } else {
      throw err
    }
  }

  // unique per-deployment name, used for the function, the API and the
  // invoke-permission statement
  const deploymentId = 'now-' + desc.name + '-' + (await uid(10))

  const resourcesStart = Date.now()
  const stopResourcesSpinner = wait('Creating API resources')

  debug('initializing lambda function')
  const λ = await createFunction(lambda, {
    Code: {
      ZipFile: zipFile
    },
    Runtime: 'nodejs6.10',
    Description: desc.description,
    FunctionName: deploymentId,
    Handler: '__now_handler.handler',
    Role: role.Role.Arn,
    Timeout: 15,
    MemorySize: 512
  })

  debug('initializing api gateway')
  const api = await createAPI(gateway, {
    name: deploymentId,
    description: desc.description
  })

  debug('retrieving root resource id')
  const resources = await getResources(gateway, {
    restApiId: api.id
  })
  const rootResourceId = resources.items[0].id

  debug('initializing gateway method for /')
  await putMethod(gateway, {
    restApiId: api.id,
    authorizationType: 'NONE',
    httpMethod: 'ANY',
    resourceId: rootResourceId
  })

  debug('initializing gateway integration for /')
  await putIntegration(gateway, {
    restApiId: api.id,
    resourceId: rootResourceId,
    httpMethod: 'ANY',
    type: 'AWS_PROXY',
    integrationHttpMethod: 'POST',
    uri: `arn:aws:apigateway:${region}:lambda:path/2015-03-31/functions/${λ.FunctionArn}/invocations`
  })

  debug('initializing gateway resource')
  const resource = await createResource(gateway, {
    restApiId: api.id,
    parentId: rootResourceId,
    pathPart: '{proxy+}'
  })

  debug('initializing gateway method for {proxy+}')
  await putMethod(gateway, {
    restApiId: api.id,
    authorizationType: 'NONE',
    httpMethod: 'ANY',
    resourceId: resource.id
  })

  debug('initializing gateway integration for {proxy+}')
  await putIntegration(gateway, {
    restApiId: api.id,
    resourceId: resource.id,
    httpMethod: 'ANY',
    type: 'AWS_PROXY',
    integrationHttpMethod: 'POST',
    uri: `arn:aws:apigateway:${region}:lambda:path/2015-03-31/functions/${λ.FunctionArn}/invocations`
  })

  debug('creating deployment')
  await createDeployment(gateway, {
    restApiId: api.id,
    stageName: 'now'
  })

  // the account id is embedded in the role ARN (arn:aws:iam::<id>:...)
  const [, accountId] = role.Role.Arn.match(/^arn:aws:iam::(\d+):/)

  // allow API Gateway to invoke the function
  await addPermission(lambda, {
    FunctionName: deploymentId,
    StatementId: deploymentId,
    Action: 'lambda:InvokeFunction',
    Principal: 'apigateway.amazonaws.com',
    SourceArn: `arn:aws:execute-api:${region}:${accountId}:${api.id}/now/ANY/*`
  })

  stopResourcesSpinner()
  console.log(
    ok(
      `API resources created (id: ${param(deploymentId)}) ${gray(
        `[${ms(Date.now() - resourcesStart)}]`
      )}`
    )
  )

  const url = `https://${api.id}.execute-api.${region}.amazonaws.com/now`
  const copied = copyToClipboard(url, config.copyToClipboard)

  console.log(
    success(
      `${link(url)} ${copied ? gray('(in clipboard)') : ''} ${gray(
        `[${ms(Date.now() - start)}]`
      )}`
    )
  )

  return 0
}
|||
|
|||
// Promise wrapper for the callback-based IAM#getRole.
const getRole = (iam, params) =>
  new Promise((resolve, reject) => {
    iam.getRole(params, (err, data) => (err ? reject(err) : resolve(data)))
  })
|||
|
|||
// Promise wrapper for the callback-based IAM#createRole.
const createRole = (iam, params) =>
  new Promise((resolve, reject) => {
    iam.createRole(params, (err, data) => (err ? reject(err) : resolve(data)))
  })
|||
|
|||
// Promise wrapper for the callback-based Lambda#createFunction.
const createFunction = (lambda, params) =>
  new Promise((resolve, reject) => {
    lambda.createFunction(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based Lambda#addPermission.
const addPermission = (lambda, params) =>
  new Promise((resolve, reject) => {
    lambda.addPermission(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#createRestApi.
const createAPI = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.createRestApi(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#getResources.
const getResources = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.getResources(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#createResource.
const createResource = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.createResource(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#putMethod.
const putMethod = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.putMethod(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#putIntegration.
const putIntegration = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.putIntegration(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
// Promise wrapper for the callback-based APIGateway#createDeployment.
const createDeployment = (gateway, params) =>
  new Promise((resolve, reject) => {
    gateway.createDeployment(params, (err, data) =>
      err ? reject(err) : resolve(data)
    )
  })
|||
|
|||
module.exports = deploy |
@ -0,0 +1,18 @@ |
|||
const aws = require('aws-sdk') |
|||
|
|||
const getAWS = authConfig => { |
|||
const { credentials } = authConfig |
|||
const awsCredentials: Object = credentials.find(c => c.provider === 'aws') |
|||
|
|||
if (awsCredentials.useVendorConfig) { |
|||
aws.config.credentials = new aws.SharedIniFileCredentials() |
|||
} else { |
|||
aws.config = new aws.Config() |
|||
aws.config.accessKeyId = awsCredentials.accessKeyId |
|||
aws.config.secretAccessKey = awsCredentials.secretAccessKey |
|||
} |
|||
|
|||
return aws |
|||
} |
|||
|
|||
module.exports = getAWS |
@ -0,0 +1,61 @@ |
|||
const getHandler = require('../../serverless/get-handler')

// generate the handler that we'll use as the ƛ function
//
// desc: project description (from describeProject) — only `packageJSON`,
// `packageJSON.scripts.start` and `hasServerJSFile` are read here.
// Returns the handler source produced by getHandler, which proxies each
// Lambda/API-Gateway event to the user's HTTP server over loopback.
const getLambdaHandler = desc => {
  // the command that our handler will invoke to fire up
  // the user-supplied HTTP server
  let cmd = null
  let script = null

  if (desc.packageJSON) {
    if (desc.packageJSON.scripts && desc.packageJSON.scripts.start) {
      // prefer the project's own `start` script
      cmd = desc.packageJSON.scripts.start
    } else {
      // `node .` will use `main` or fallback to `index.js`
      script = './'
    }
  } else {
    // no package.json: pick a conventional entry file
    if (desc.hasServerJSFile) {
      script = 'server.js'
    } else {
      script = 'index.js'
    }
  }

  return getHandler({ script, cmd }, (makeRequest, getPort, req, ctx, fn) => {
    // rebuild the request URL from the API Gateway event
    // NOTE(review): a '?' is appended even when there are no query string
    // parameters — presumably harmless for most servers; confirm
    const url =
      req.path +
      '?' +
      require('querystring').stringify(req.queryStringParameters)
    // forward the event to the local user server and buffer the response
    const proxy = makeRequest(
      {
        port: getPort(),
        hostname: '127.0.0.1',
        path: url,
        method: req.httpMethod,
        headers: req.headers
      },
      proxyRes => {
        let body = ''
        proxyRes.on('data', data => {
          body += data
        })
        proxyRes.on('error', err => {
          // NOTE(review): after an 'error' the 'end' listener may still
          // fire and call fn a second time — relies on the Lambda callback
          // ignoring repeat invocations; confirm
          fn(err)
          body = ''
        })
        proxyRes.on('end', () => {
          // shape matches the API Gateway proxy-integration response
          fn(null, {
            statusCode: proxyRes.statusCode,
            headers: proxyRes.headers,
            body
          })
        })
      }
    )
    proxy.on('error', fn)
    proxy.end(req.body)
  })
}

module.exports = getLambdaHandler
@ -0,0 +1,5 @@ |
|||
// Placeholder help output for the `now aws help` subcommand.
const help = () => console.log('halp')
|||
|
|||
module.exports = help |
@ -0,0 +1,13 @@ |
|||
// AWS provider manifest consumed by the top-level `now` dispatcher.
// Subcommand implementations are exposed via lazy getters so that merely
// requiring this module stays cheap — each file loads on first access.
// NOTE(review): 'ls' is listed in `subcommands` but has no matching getter
// here, so dispatching `now aws ls` would read undefined — confirm.
module.exports = {
  title: 'AWS Lambda',
  subcommands: new Set(['help', 'login', 'deploy', 'ls']),
  // lazily load ./deploy on first access
  get deploy() {
    return require('./deploy')
  },
  // lazily load ./help on first access
  get help() {
    return require('./help')
  },
  // lazily load ./login on first access
  get login() {
    return require('./login')
  }
}
@ -0,0 +1,162 @@ |
|||
// @flow
|
|||
// theirs
|
|||
const { green, italic } = require('chalk') |
|||
|
|||
// ours
|
|||
const info = require('../../util/output/info') |
|||
const note = require('../../util/output/note') |
|||
const aborted = require('../../util/output/aborted') |
|||
const cmd = require('../../util/output/cmd') |
|||
const param = require('../../util/output/param') |
|||
const ready = require('../../util/output/ready') |
|||
const highlight = require('../../util/output/highlight') |
|||
const listItem = require('../../util/output/list-item') |
|||
const link = require('../../util/output/link') |
|||
const textInput = require('../../util/input/text') |
|||
const eraseLines = require('../../util/output/erase-lines') |
|||
const chars = require('../../util/output/chars') |
|||
const { |
|||
hasExternalCredentials, |
|||
getExternalCredentials, |
|||
AWS_CREDENTIALS_FILE_PATH |
|||
} = require('./util/external-credentials') |
|||
const promptBool = require('../../util/input/prompt-bool') |
|||
const { |
|||
writeToAuthConfigFile, |
|||
getAuthConfigFilePath |
|||
} = require('../../util/config-files') |
|||
const humanize = require('../../util/humanize-path') |
|||
|
|||
const accessKeyIdLabel = 'Access Key ID ' |
|||
const secretAccessKeyLabel = 'Secret Access Key ' |
|||
|
|||
// Persist an AWS credential entry into ctx.authConfig and write the auth
// config file to disk.
//
// When `useExternal` is set, only a marker is stored (credentials will be
// read from ~/.aws/credentials at use time); otherwise the explicit key
// pair is stored. `credentialsIndex` is the position of an existing `aws`
// entry (-1 when the user was not logged in to aws yet).
// Returns the (mutated) ctx.
function saveCredentials({
  ctx,
  accessKeyId,
  secretAccessKey,
  useExternal,
  credentialsIndex
}) {
  const entry = useExternal
    ? { provider: 'aws', useVendorConfig: true }
    : { provider: 'aws', accessKeyId, secretAccessKey }

  if (credentialsIndex === -1) {
    // not logged in yet — append a fresh entry
    ctx.authConfig.credentials.push(entry)
  } else {
    // already logged in — replace the stored entry in place
    ctx.authConfig.credentials[credentialsIndex] = entry
  }

  writeToAuthConfigFile(ctx.authConfig)

  return ctx
}
|||
|
|||
// Interactive `now aws login` flow.
//
// If ~/.aws/credentials exists, offers to reference it instead of storing
// keys; otherwise walks the user through creating an IAM user and prompts
// for the access key pair, storing the result via saveCredentials.
// Returns 1 when the user aborts; otherwise undefined.
async function login(ctx) {
  const credentialsIndex = ctx.authConfig.credentials.findIndex(
    cred => cred.provider === 'aws'
  )

  if (credentialsIndex !== -1) {
    // the user is already logged in on aws
    console.log(
      note(`You already have AWS credentials – this will replace them.`)
    )
  }

  if (await hasExternalCredentials()) {
    // if the user has ~/.aws/credentials, let's ask if they want to use them
    const credentials = await getExternalCredentials()

    if (credentials.accessKeyId && credentials.secretAccessKey) {
      let yes
      try {
        yes = await promptBool(
          info(
            `AWS credentials found in ${param(AWS_CREDENTIALS_FILE_PATH)}.`,
            `  Would you like to use them?`
          ),
          {
            defaultValue: true
          }
        )
      } catch (err) {
        if (err.code === 'USER_ABORT') {
          console.log(aborted('No changes made.'))
          return 1
        }
        throw err
      }

      if (yes) {
        // store only a marker; keys stay in the vendor file
        ctx = saveCredentials({ ctx, useExternal: true, credentialsIndex })
        console.log(
          ready(`Credentials will be read from your AWS config when needed`)
        )
        return
      } else {
        console.log(info(`Ignoring ${param(AWS_CREDENTIALS_FILE_PATH)}`))
      }
    }
  }

  // prettier-ignore
  // fix: "comunicate" -> "communicate" (user-facing typo)
  console.log(info(
    `We'll need your ${highlight('AWS credentials')} in order to communicate with their API.`,
    `  To provision a dedicated set of tokens for ${cmd('now')}, do the following:`,
    ``,
    `  ${listItem(1, `Go to ${link('https://console.aws.amazon.com/iam')}`)}`,
    `  ${listItem(2, `Click on ${param('Users')} in the left menubar`)}`,
    `  ${listItem(3, `Click on ${param('Add user')}`)}`,
    `  ${listItem(4, `Give your user a name and select ${param('Programmatic access')}`)}`,
    `  ${listItem(5, `In the ${param('Permissions')} step, select\n` +
    `      ${param('Attach existing policies directly')}\n` +
    `      and then\n` +
    `      ${param('AdministratorAccess')}`)} ${italic('(or pick your own)')}`,
    `  ${listItem(6, `After the ${param('Review')} step, grab your keys and paste them below:`)}`,
    ``
  ))

  try {
    // prompt for each key, then redraw the line with a checkmark
    const accessKeyId = await textInput({ label: listItem(accessKeyIdLabel) })
    console.log(
      `${eraseLines(1)}${green(chars.tick)} ${accessKeyIdLabel}${accessKeyId}`
    )

    const secretAccessKey = await textInput({
      label: listItem(secretAccessKeyLabel)
    })
    console.log(
      `${eraseLines(1)}${green(
        chars.tick
      )} ${secretAccessKeyLabel}${secretAccessKey}`
    )

    ctx = saveCredentials({
      ctx,
      accessKeyId,
      secretAccessKey,
      credentialsIndex
    })
    console.log(
      ready(`Credentials saved in ${param(humanize(getAuthConfigFilePath()))}`)
    )
  } catch (err) {
    if (err.code === 'USER_ABORT') {
      console.log(aborted('No changes made.'))
      return 1
    }
    throw err
  }
}
|||
|
|||
module.exports = login |
@ -0,0 +1,64 @@ |
|||
// node
|
|||
const { join: joinPath } = require('path') |
|||
const { homedir } = require('os') |
|||
|
|||
// theirs
|
|||
const { readFile, exists: fileExists } = require('fs.promised') |
|||
const debug = require('debug')('now:aws:util:external-credentials') |
|||
|
|||
const AWS_CREDENTIALS_FILE_PATH = joinPath(homedir(), '.aws', 'credentials') |
|||
// matches `aws_access_key_id=aaaaa`
|
|||
// and `aws_access_key_id = aaaaa` with any number of spaces
|
|||
const ACCESS_KEY_ID_REGEX = /^aws_access_key_id(\s+)?=(\s+)?(.*)$/m |
|||
const SECRET_ACCESS_KEY_REGEX = /^aws_secret_access_key(\s+)?=(\s+)?(.*)$/m |
|||
|
|||
// checks if there's a ~/.aws/credentials
|
|||
// checks if there's a ~/.aws/credentials
// A failing existence check resolves to false instead of throwing:
// 1. during login we simply fall back to the manual key-entry steps
// 2. for repeat `now aws` use, callers treat a false result as "the file
//    we depended on is gone" and tell the user
async function hasExternalCredentials() {
  try {
    return await fileExists(AWS_CREDENTIALS_FILE_PATH)
  } catch (err) {
    debug(`Couldn't read ${AWS_CREDENTIALS_FILE_PATH} because of ${err}`)
    return false
  }
}
|||
|
|||
// gets the two aws tokens from ~/.aws/credentials
|
|||
// assumes the file exist – `hasExternalCredentials` should always be called
|
|||
// first
|
|||
// gets the two aws tokens from ~/.aws/credentials
// assumes the file exists – `hasExternalCredentials` should always be
// called first
// Returns { accessKeyId, secretAccessKey }; each is undefined when the
// corresponding line is missing from the file.
async function getExternalCredentials() {
  let contents
  try {
    contents = await readFile(AWS_CREDENTIALS_FILE_PATH, 'utf8')
  } catch (err) {
    // Here we should error because the file is there but we can't read it
    // fix: "beause" -> "because" in the error message
    throw new Error(
      `Couldn't read ${AWS_CREDENTIALS_FILE_PATH} because of ${err.message}`
    )
  }

  // capture group 3 holds the value after the `=` (see the regexes above)
  const matchesAccessKeyId = ACCESS_KEY_ID_REGEX.exec(contents)
  const matchesSecretAccessKey = SECRET_ACCESS_KEY_REGEX.exec(contents)

  return {
    accessKeyId: (matchesAccessKeyId && matchesAccessKeyId[3]) || undefined,
    secretAccessKey:
      (matchesSecretAccessKey && matchesSecretAccessKey[3]) || undefined
  }
}
|||
|
|||
module.exports = {
  hasExternalCredentials,
  getExternalCredentials,
  // On non-Windows platforms the exported path has the home directory
  // abbreviated to `~`.
  // NOTE(review): this makes the exported constant a *display* form that
  // differs from the path actually read above — verify no consumer uses it
  // for file access.
  AWS_CREDENTIALS_FILE_PATH:
    process.platform === 'win32'
      ? AWS_CREDENTIALS_FILE_PATH
      : AWS_CREDENTIALS_FILE_PATH.replace(homedir(), '~')
}
@ -0,0 +1,285 @@ |
|||
// @flow
|
|||
|
|||
// theirs
|
|||
const ms = require('ms') |
|||
const fetch = require('node-fetch') |
|||
const minimist = require('minimist') |
|||
const { gray, bold } = require('chalk') |
|||
const uid = require('uid-promise') |
|||
const bytes = require('bytes') |
|||
const sleep = require('then-sleep') |
|||
const debug = require('debug')('now:gcp:deploy') |
|||
|
|||
// ours
|
|||
const ok = require('../../util/output/ok') |
|||
const info = require('../../util/output/info') |
|||
const wait = require('../../util/output/wait') |
|||
const link = require('../../util/output/link') |
|||
const success = require('../../util/output/success') |
|||
const humanPath = require('../../util/humanize-path') |
|||
const resolve = require('../../resolve') |
|||
const error = require('../../util/output/error') |
|||
const param = require('../../util/output/param') |
|||
const build = require('../../serverless/build') |
|||
const getToken = require('./util/get-access-token') |
|||
const describeProject = require('../../describe-project') |
|||
const copyToClipboard = require('../../util/copy-to-clipboard') |
|||
const getFunctionHandler = require('./util/get-function-handler') |
|||
|
|||
const BUCKET_NAME = 'now-deployments' |
|||
|
|||
// Deploys the target directory as a Google Cloud Function.
//
// High-level flow:
//   1. parse argv and resolve the deployment target directory
//   2. build a zip bundle of the app with a generated `function.js` handler
//   3. ensure the GCS bucket exists and upload the zip to it
//   4. create the Cloud Function pointing at the uploaded archive
//   5. poll until the function reports READY, then print (and copy) its URL
//
// Returns a process exit code: 0 on success, 1 on any handled failure.
const deploy = async ctx => {
  const { argv: argv_ } = ctx
  const argv = minimist(argv_, {
    boolean: ['help'],
    alias: {
      help: 'h'
    }
  })

  const token = await getToken(ctx)

  // `now [provider] [deploy] [target]`
  const [cmdOrTarget = null, target_ = null] = argv._.slice(2).slice(-2)

  let target

  if (cmdOrTarget === 'gcp' || cmdOrTarget === 'deploy') {
    // explicit subcommand given — the next positional (if any) is the target
    target = target_ === null ? process.cwd() : target_
  } else {
    if (target_) {
      console.error(error('Unexpected number of arguments for deploy command'))
      return 1
    } else {
      // bare `now <target>` form — the single positional is the target
      target = cmdOrTarget === null ? process.cwd() : cmdOrTarget
    }
  }

  const start = Date.now()
  const resolved = await resolve(target)

  if (resolved === null) {
    console.error(error(`Could not resolve deployment target ${param(target)}`))
    return 1
  }

  let desc = null

  try {
    desc = await describeProject(resolved)
  } catch (err) {
    if (err.code === 'AMBIGOUS_CONFIG') {
      console.error(
        error(`There is more than one source of \`now\` config: ${err.files}`)
      )
      return 1
    } else {
      throw err
    }
  }

  // inject the generated entry point that proxies requests to the user app
  const overrides = {
    'function.js': getFunctionHandler(desc)
  }

  const region = 'us-central1'

  console.log(
    info(
      `Deploying ${param(humanPath(resolved))} ${gray('(gcp)')} ${gray(
        `(${region})`
      )}`
    )
  )

  const buildStart = Date.now()
  const stopBuildSpinner = wait('Building and bundling your app…')
  const zipFile = await build(resolved, desc, { overrides })
  stopBuildSpinner()

  if (zipFile.length > 100 * 1024 * 1024) {
    console.error(error('The build exceeds the 100mb GCP Functions limit'))
    return 1
  }

  console.log(
    ok(
      `Build generated a ${bold(bytes(zipFile.length))} zip ${gray(
        `[${ms(Date.now() - buildStart)}]`
      )}`
    )
  )

  const deploymentId = 'now-' + desc.name + '-' + (await uid(10))
  const zipFileName = `${deploymentId}.zip`

  const { project } = ctx.authConfig.credentials.find(p => p.provider === 'gcp')

  const resourcesStart = Date.now()
  // NOTE(review): the early `return 1` paths below never call
  // stopResourcesSpinner(), leaving the spinner running on failure
  const stopResourcesSpinner = wait('Creating API resources')

  debug('creating gcp storage bucket')
  const bucketRes = await fetch(
    `https://www.googleapis.com/storage/v1/b?project=${project.id}`,
    {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${token}`
      },
      body: JSON.stringify({
        name: BUCKET_NAME
      })
    }
  )

  // 409 means the bucket already exists, which is fine — we reuse it
  if (
    bucketRes.status !== 200 &&
    bucketRes.status !== 409 /* already exists */
  ) {
    console.error(
      error(
        `Error while creating GCP Storage bucket: ${await bucketRes.text()}`
      )
    )
    return 1
  }

  debug('creating gcp storage file')
  const fileRes = await fetch(
    `https://www.googleapis.com/upload/storage/v1/b/${BUCKET_NAME}/o?uploadType=media&name=${encodeURIComponent(
      zipFileName
    )}&project=${encodeURIComponent(project.id)}`,
    {
      method: 'POST',
      headers: {
        'Content-Type': 'application/zip',
        'Content-Length': zipFile.length,
        Authorization: `Bearer ${token}`
      },
      body: zipFile
    }
  )

  try {
    await assertSuccessfulResponse(fileRes)
  } catch (err) {
    console.error(error(err.message))
    return 1
  }

  debug('creating gcp function create')
  const fnCreateRes = await fetch(
    `https://cloudfunctions.googleapis.com/v1beta2/projects/${project.id}/locations/${region}/functions`,
    {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${token}`
      },
      body: JSON.stringify({
        name: `projects/${project.id}/locations/${region}/functions/${deploymentId}`,
        timeout: '15s',
        availableMemoryMb: 512,
        sourceArchiveUrl: `gs://${BUCKET_NAME}/${zipFileName}`,
        entryPoint: 'handler',
        httpsTrigger: {
          url: null
        }
      })
    }
  )

  // 403 here almost always means the Cloud Functions API isn't enabled for
  // the project, so point the user at the API Manager
  if (403 === fnCreateRes.status) {
    const url = `https://console.cloud.google.com/apis/api/cloudfunctions.googleapis.com/overview?project=${project.id}`
    console.error(
      error(
        'GCP Permission Denied error. Make sure the "Google Cloud Functions API" ' +
          `is enabled in the API Manager\n ${bold('API Manager URL')}: ${link(
            url
          )}`
      )
    )
    return 1
  }

  try {
    await assertSuccessfulResponse(fnCreateRes)
  } catch (err) {
    console.error(error(err.message))
    return 1
  }

  // poll the function status every 5s (up to 10 attempts) until READY
  let retriesLeft = 10
  let status
  let url

  do {
    if (!--retriesLeft) {
      console.error(
        error('Could not determine status of the deployment: ' + url)
      )
      return 1
    } else {
      await sleep(5000)
    }

    const fnRes = await fetch(
      `https://cloudfunctions.googleapis.com/v1beta2/projects/${project.id}/locations/${region}/functions/${deploymentId}`,
      {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${token}`
        }
      }
    )

    try {
      await assertSuccessfulResponse(fnRes)
    } catch (err) {
      console.error(error(err.message))
      return 1
    }

    // leading `;` guards the destructuring assignment against ASI issues
    ;({ status, httpsTrigger: { url } } = await fnRes.json())
  } while (status !== 'READY')

  stopResourcesSpinner()
  console.log(
    ok(
      `API resources created (id: ${param(deploymentId)}) ${gray(
        `[${ms(Date.now() - resourcesStart)}]`
      )}`
    )
  )

  const copied = copyToClipboard(url, ctx.config.copyToClipboard)

  console.log(
    success(
      `${link(url)} ${copied ? gray('(in clipboard)') : ''} ${gray(
        `[${ms(Date.now() - start)}]`
      )}`
    )
  )

  return 0
}
|||
|
|||
// Throws a descriptive Error when `res` is a non-2xx response; resolves
// silently otherwise.
//
// The message is taken from the Google-style JSON error envelope
// (`{ error: { message } }`) when present. If the body is not valid JSON or
// doesn't follow that shape, a generic message carrying the HTTP status is
// used instead.
//
// @param {Object} res - fetch Response-like object (`ok`, `status`, `json()`)
// @returns {Promise<void>}
// @throws {Error} when `res.ok` is false
const assertSuccessfulResponse = async res => {
  if (!res.ok) {
    let body = null

    try {
      body = await res.json()
    } catch (err) {
      // ignored — handled by the generic fallback message below
    }

    // Previously `body.error.message` was read unconditionally, which threw
    // a TypeError (masking the fallback message) whenever the body was
    // unparseable or lacked an `error` object
    const msg =
      (body && body.error && body.error.message) ||
      `An API error was returned (${res.status}), but the error code could not be diagnosed`
    throw new Error(msg)
  }
}
|||
|
|||
module.exports = deploy |
@ -0,0 +1,5 @@ |
|||
// Prints usage information for the `now gcp` subcommand.
// Placeholder output for now — a full help screen is yet to be written.
function help() {
  console.log('gcp help')
}

module.exports = help
@ -0,0 +1,18 @@ |
|||
// Registry for the `now gcp` provider: display metadata plus the
// implementations of its subcommands.
module.exports = {
  title: 'Google Cloud Platform',
  subcommands: new Set(['help', 'login', 'deploy', 'ls']),

  // we use getters for commands to lazily get code
  // and not bog down initialization
  get help() {
    return require('./help')
  },

  get deploy() {
    return require('./deploy')
  },

  get login() {
    return require('./login')
  }

  // NOTE(review): `ls` is listed in `subcommands` above but has no getter
  // here — confirm whether it is dispatched elsewhere or simply missing
}
@ -0,0 +1,19 @@ |
|||
// ours
|
|||
const fetch = require('./util/fetch') |
|||
const getToken = require('./util/get-access-token') |
|||
|
|||
const URL = 'https://cloudresourcemanager.googleapis.com/v1/projects' |
|||
|
|||
// Lists the GCP projects accessible to the logged-in user.
// Resolves with the array of projects, or with the numeric exit code 1
// when no access token is available (i.e. the user is not logged in).
const projectsLs = async ctx => {
  const accessToken = await getToken(ctx)

  // `getToken` already printed an error if the user isn't logged in
  if (!accessToken) {
    return 1
  }

  const response = await fetch({ url: URL, token: accessToken })
  return response.projects
}
|||
|
|||
module.exports = projectsLs |
@ -0,0 +1,276 @@ |
|||
// node
|
|||
const { parse: parseUrl } = require('url') |
|||
const { encode: encodeQuery, stringify: formUrlEncode } = require('querystring') |
|||
const { createServer } = require('http') |
|||
|
|||
// theirs
|
|||
const opn = require('opn') |
|||
const fetch = require('node-fetch') |
|||
const debug = require('debug')('now:gcp:login') |
|||
|
|||
// ours
|
|||
const error = require('../../util/output/error') |
|||
const aborted = require('../../util/output/aborted') |
|||
const info = require('../../util/output/info') |
|||
const ready = require('../../util/output/ready') |
|||
const param = require('../../util/output/param') |
|||
const promptBool = require('../../util/input/prompt-bool') |
|||
const getNowDir = require('../../get-now-dir') |
|||
const humanize = require('../../util/humanize-path') |
|||
const saveCredentials = require('./util/save-credentials') |
|||
const promptList = require('../../util/input/list') |
|||
const listProjects = require('./list-projects') |
|||
const { writeToAuthConfigFile } = require('../../util/config-files') |
|||
|
|||
// ports that are authorized in the GCP app
|
|||
const PORTS = [8085, 8086, 8087, 8088] |
|||
const CLIENT_ID = |
|||
'258013614557-0qulvq65vqk8pi9akn7igqsquejjffil.apps.googleusercontent.com' |
|||
const CLIENT_SECRET = 'SvmeeRFmKQkIe_ZQHSe1UJ-O' |
|||
// instructs gcp to send the response in the query string
|
|||
const RESPONSE_TYPE = 'code' |
|||
const SCOPES = [ |
|||
'https://www.googleapis.com/auth/cloud-platform', |
|||
'https://www.googleapis.com/auth/appengine.admin', |
|||
'https://www.googleapis.com/auth/compute', |
|||
'https://www.googleapis.com/auth/accounts.reauth' |
|||
] |
|||
// instructs gcp to return a `refresh_token` that we'll use to seamlessly
|
|||
// get a new auth token every time the current one expires
|
|||
const ACCESS_TYPE = 'offline' |
|||
// url we'll send the user to
|
|||
const USER_URL = 'https://accounts.google.com/o/oauth2/v2/auth' |
|||
// url we'll get the access tokens from and refresh the token when needed
|
|||
const TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token' |
|||
// required by oauth2's spec
|
|||
const GRANT_TYPE = 'authorization_code' |
|||
// this ensures google *always* asks the user for permission
|
|||
// we enforce this to make sure we *always* receive a `refresh_token` (if the
// app is already authorized by the user, a `refresh_token` will *not*
// be returned) since we need it
|
|||
const PROMPT_CONSENT = 'consent' |
|||
|
|||
// Promisified `server.listen`: resolves once the server is listening on
// `port`, rejects when an `error` event fires (e.g. the port is taken).
const serverListen = ({ server, port }) =>
  new Promise((resolve, reject) => {
    // rejection path: emitted when binding fails (EADDRINUSE etc.)
    server.on('error', reject)
    server.listen(port, resolve)
  })
|||
|
|||
// Interactive `now gcp login` flow:
//   1. warns when GCP credentials already exist (they would be replaced)
//   2. starts a local HTTP server on one of the whitelisted PORTS to serve
//      as the OAuth redirect target
//   3. opens the browser at Google's consent screen
//   4. exchanges the returned authorization code for access/refresh tokens
//   5. saves the tokens and the project the user selects
//
// Resolves with a process exit code; this promise never rejects — every
// failure path resolves with 1.
function login(ctx) {
  return new Promise(async resolve => {
    let credentialsIndex = ctx.authConfig.credentials.findIndex(
      cred => cred.provider === 'gcp'
    )

    if (credentialsIndex !== -1) {
      // the user is already logged into gcp
      let yes
      try {
        yes = await promptBool(
          info(
            `You already have GCP credentials – this will replace them.`,
            ` Do you want to continue?`
          )
        )
      } catch (err) {
        // promptBool only `reject`s upon user abort
        // let's set it to false just to make it clear
        yes = false
      }

      if (!yes) {
        console.log(aborted('No changes made.'))
        resolve(0)
        // NOTE(review): there is no `return` here — execution continues, the
        // local server still starts and the browser is still opened even
        // after the user aborts. Confirm a `return` is intended.
      }
    }

    const ports = [...PORTS]
    const server = createServer(async function handleRequest(req, res) {
      const { query: { error: _error, code } } = parseUrl(req.url, true)

      if (!_error && !code) {
        // the browser requesting the favicon etc
        res.end('')
        return
      }

      res.setHeader('content-type', 'text/html')
      res.end(
        `<meta charset="UTF-8">` +
          `<h2>That's it – you can now return to your terminal!</h2>`
      )

      if (_error) {
        // the user didn't give us permission
        console.log(aborted(`No changes made.`))
        return resolve(1)
      }

      if (code) {
        // that's right after the user gave us permission
        // let's exchange the authorization code for an access + refresh codes

        const body = formUrlEncode({
          code,
          client_id: CLIENT_ID,
          client_secret: CLIENT_SECRET,
          redirect_uri: `http://${req.headers.host}`,
          grant_type: GRANT_TYPE
        })

        const opts = {
          method: 'POST',
          headers: {
            'content-type': 'application/x-www-form-urlencoded',
            'content-length': body.length // just in case
          },
          body: body
        }

        let accessToken
        let expiresIn
        let refreshToken
        let response

        try {
          response = await fetch(TOKEN_URL, opts)
          if (response.status !== 200) {
            debug(
              `HTTP ${response.status} when trying to exchange the authorization code`,
              await response.text()
            )
            console.log(
              error(
                `Got unexpected status code from Google: ${response.status}`
              )
            )
            return resolve(1)
          }
        } catch (err) {
          debug(
            'unexpected error occurred while making the request to exchange the authorization code',
            err.message
          )
          console.log(
            error(
              'Unexpected error occurred while authenthing with Google',
              err.stack
            )
          )
          return resolve(1)
        }

        try {
          const json = await response.json()
          accessToken = json.access_token
          expiresIn = json.expires_in
          refreshToken = json.refresh_token
        } catch (err) {
          debug(
            'unexpected error occurred while parsing the JSON from the exchange request',
            err.stack,
            'got',
            await response.text()
          )
          console.log(
            error(
              'Unexpected error occurred while parsing the JSON response from Google',
              err.message
            )
          )
          resolve(1)
          // NOTE(review): missing `return` — after a JSON parse failure
          // execution falls through and saves undefined tokens below.
        }

        const now = new Date()
        // `expires_in` is 3600 seconds
        const expiresAt = now.setSeconds(now.getSeconds() + expiresIn)
        ctx = saveCredentials({
          ctx,
          accessToken,
          expiresAt,
          refreshToken,
          credentialsIndex
        })

        // let the user pick which GCP project deployments should go to
        const projects = await listProjects(ctx)
        const message = 'Select a project:'
        const choices = projects.map(project => {
          return {
            name: `${project.name} (${project.projectId})`,
            value: project.projectId,
            short: project.name
          }
        })

        const projectId = await promptList({
          message,
          choices,
          separator: false
        })

        const { projectId: id, name } = projects.find(
          p => p.projectId === projectId
        )

        // re-find the index: saveCredentials may have appended a new entry
        credentialsIndex = ctx.authConfig.credentials.findIndex(
          cred => cred.provider === 'gcp'
        )
        ctx.authConfig.credentials[credentialsIndex].project = {
          id,
          name
        }

        writeToAuthConfigFile(ctx.authConfig)

        console.log(
          ready(
            `Credentials and project saved in ${param(humanize(getNowDir()))}.`
          )
        )
        // NOTE(review): resolving with 1 on the *success* path looks like a
        // bug — exit code 0 is expected here. Also the local server is never
        // closed, so the process may not exit on its own. Confirm both.
        resolve(1)
      }
    })

    // try each whitelisted port in order until one binds
    let shouldRetry = true
    let portToTry = ports.shift()

    while (shouldRetry) {
      try {
        await serverListen({ server, port: portToTry })
        shouldRetry = false // done, listening
      } catch (err) {
        if (ports.length) {
          // let's try again
          portToTry = ports.shift()
        } else {
          // we're out of ports to try
          shouldRetry = false
        }
      }
    }

    if (!server.listening) {
      console.log(
        error(
          `Make sure you have one of the following TCP ports available:`,
          // NOTE(review): `.replace()` with no arguments is a no-op that
          // returns the string unchanged — likely leftover code; confirm.
          ` ${PORTS.join(', ').replace()}`
        )
      )
      return resolve(1)
    }

    // send the user to Google's consent screen; the redirect lands on the
    // local server above
    const query = {
      client_id: CLIENT_ID,
      redirect_uri: `http://localhost:${portToTry}`,
      response_type: RESPONSE_TYPE,
      scope: SCOPES.join(' '),
      access_type: ACCESS_TYPE,
      prompt: PROMPT_CONSENT
    }

    opn(USER_URL + '?' + encodeQuery(query))
  })
}
|||
|
|||
module.exports = login |
@ -0,0 +1,24 @@ |
|||
// node
|
|||
const { encode: encodeQuery } = require('querystring') |
|||
|
|||
// theirs
|
|||
const _fetch = require('node-fetch') |
|||
|
|||
// Thin JSON wrapper around `node-fetch` for authenticated GCP API calls:
// appends `query` (when given) to the URL, sends the bearer `token`, and
// resolves with the parsed JSON body.
const fetch = async ({ url, method = 'GET', token, query }) => {
  const fullUrl = query ? `${url}?${encodeQuery(query)}` : url

  const res = await _fetch(fullUrl, {
    method,
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${token}`
    }
  })

  return res.json()
}
|||
|
|||
module.exports = fetch |
@ -0,0 +1,121 @@ |
|||
// node
|
|||
const { stringify: formUrlEncode } = require('querystring') |
|||
|
|||
// theirs
|
|||
const fetch = require('node-fetch') |
|||
const debug = require('debug')('now:gcp:get_token') |
|||
|
|||
// ours
|
|||
const saveCredentials = require('./save-credentials') |
|||
const error = require('../../../util/output/error') |
|||
const cmd = require('../../../util/output/cmd') |
|||
|
|||
const CLIENT_ID = |
|||
'258013614557-0qulvq65vqk8pi9akn7igqsquejjffil.apps.googleusercontent.com' |
|||
const CLIENT_SECRET = 'SvmeeRFmKQkIe_ZQHSe1UJ-O' |
|||
// required by oauth2's spec
|
|||
const GRANT_TYPE = 'refresh_token' |
|||
const URL = 'https://www.googleapis.com/oauth2/v4/token' |
|||
|
|||
// note that this function treats the errors it can produce, printing them
|
|||
// to the user and then returns `undefined`
|
|||
// Returns a valid GCP access token for the logged-in user, transparently
// refreshing it via the OAuth2 `refresh_token` grant when the stored token
// has expired.
//
// Handles its own errors: on any failure it prints a message and returns
// `undefined` rather than throwing.
const getAccessToken = async ctx => {
  const credentialsIndex = ctx.authConfig.credentials.findIndex(
    c => c.provider === 'gcp'
  )

  if (credentialsIndex === -1) {
    console.log(error(`You're not logged in! Run ${cmd('now gcp login')}.`))
    return
  }

  const { accessToken, expiresAt, refreshToken } = ctx.authConfig.credentials[
    credentialsIndex
  ]

  if (Date.now() < expiresAt) {
    // the token is still valid
    return accessToken
  }
  // we need to refresh the token
  const body = formUrlEncode({
    refresh_token: refreshToken,
    client_id: CLIENT_ID,
    client_secret: CLIENT_SECRET,
    grant_type: GRANT_TYPE
  })

  const opts = {
    method: 'POST',
    headers: {
      'content-type': 'application/x-www-form-urlencoded',
      'content-length': body.length // just in case
    },
    body: body
  }

  let newAccessToken
  let newExpiresIn
  let response

  try {
    response = await fetch(URL, opts)
    if (response.status !== 200) {
      // NOTE(review): these debug/error messages mention "exchange the
      // authorization code" but this request is a token *refresh* — they
      // look copy-pasted from the login flow; confirm and reword.
      debug(
        `HTTP ${response.status} when trying to exchange the authorization code`,
        await response.text()
      )
      console.log(
        error(`Got unexpected status code from Google: ${response.status}`)
      )
      return
    }
  } catch (err) {
    debug(
      'unexpected error occurred while making the request to exchange the authorization code',
      err.message
    )
    console.log(
      error(
        'Unexpected error occurred while authenthing with Google',
        err.stack
      )
    )
    return
  }

  try {
    const json = await response.json()
    newAccessToken = json.access_token
    newExpiresIn = json.expires_in
  } catch (err) {
    debug(
      'unexpected error occurred while parsing the JSON from the exchange request',
      err.stack,
      'got',
      await response.text()
    )
    console.log(
      error(
        'Unexpected error occurred while parsing the JSON response from Google',
        err.message
      )
    )
    return
  }

  const now = new Date()
  // `expires_in` is 3600 seconds
  const newExpiresAt = now.setSeconds(now.getSeconds() + newExpiresIn)
  // persist the refreshed token so subsequent commands reuse it
  saveCredentials({
    ctx,
    accessToken: newAccessToken,
    expiresAt: newExpiresAt,
    refreshToken,
    credentialsIndex
  })

  return newAccessToken
}
|||
|
|||
module.exports = getAccessToken |
@ -0,0 +1,64 @@ |
|||
const getHandler = require('../../../serverless/get-handler') |
|||
|
|||
// Builds the GCP Cloud Function entry-point source for the user's app.
//
// Determines how to boot the user-supplied HTTP server — either a shell
// command (the package.json `start` script) or a script path — and wraps it
// in a proxy handler that forwards the function's incoming request to the
// locally booted server and streams the response back.
//
// @param {Object} desc - project description from `describeProject`
// @returns the handler source produced by `getHandler`
const getFunctionHandler = desc => {
  // the command that our handler will invoke to fire up
  // the user-supplied HTTP server
  let cmd = null
  let script = null

  if (desc.packageJSON) {
    if (desc.packageJSON.scripts && desc.packageJSON.scripts.start) {
      cmd = desc.packageJSON.scripts.start
    } else {
      // `node .` will use `main` or fallback to `index.js`
      script = './'
    }
  } else {
    // no package.json: prefer an explicit server.js over index.js
    script = desc.hasServerJSFile ? 'server.js' : 'index.js'
  }

  return getHandler({ cmd, script }, (makeRequest, getPort, req, res) => {
    let body

    // GCP pre-parses JSON bodies; re-serialize them before proxying.
    // Fixed: this previously tested the still-undefined local `body`
    // against Buffer instead of `req.body`, so Buffer payloads were
    // mangled by JSON.stringify.
    if ('object' === typeof req.body && !(req.body instanceof Buffer)) {
      body = JSON.stringify(req.body)
    } else {
      body = req.body
    }

    console.log('got request', req.url, req.method, req.headers)
    const proxyRequest = makeRequest(
      {
        port: getPort(),
        hostname: '127.0.0.1',
        // TODO: figure out how to get the path?
        path: req.url,
        method: req.method,
        headers: req.headers
      },
      proxyRes => {
        // stream the proxied response straight through to the caller
        proxyRes.on('data', data => {
          res.write(data)
        })
        proxyRes.on('error', err => {
          console.error(err)
          res.status(500).end()
        })
        proxyRes.on('end', () => {
          res.end()
        })
      }
    )
    proxyRequest.on('error', err => {
      console.error(err)
      res.status(500).end()
    })
    proxyRequest.end(body)
  })
}
|||
|
|||
module.exports = getFunctionHandler |
@ -0,0 +1,31 @@ |
|||
const { writeToAuthConfigFile } = require('../../../util/config-files') |
|||
|
|||
// Persists GCP OAuth credentials into the auth config.
//
// Merges the new token data over whatever entry sits at `credentialsIndex`
// (an index of -1 means "not logged in yet" and appends a fresh entry),
// writes the updated auth config to disk, and returns the mutated ctx.
const saveCredentials = ({
  ctx,
  accessToken,
  expiresAt,
  refreshToken,
  credentialsIndex
}) => {
  const { credentials } = ctx.authConfig
  // index -1 yields undefined, so fall back to an empty base object
  const existing = credentials[credentialsIndex] || {}

  const updated = Object.assign({}, existing, {
    provider: 'gcp',
    accessToken,
    expiresAt,
    refreshToken
  })

  if (credentialsIndex === -1) {
    // the user is not logged in — add a brand-new entry
    credentials.push(updated)
  } else {
    // already logged in — replace the credentials we had
    credentials[credentialsIndex] = updated
  }

  writeToAuthConfigFile(ctx.authConfig)

  return ctx
}
|||
|
|||
module.exports = saveCredentials |
@ -0,0 +1,6 @@ |
|||
// @flow
|
|||
// Registry of all deployment providers supported by this CLI, keyed by the
// provider name used on the command line (e.g. `now gcp …`).
module.exports = {
  sh: require('./sh'),
  aws: require('./aws'),
  gcp: require('./gcp')
}
@ -0,0 +1,853 @@ |
|||
#!/usr/bin/env node
|
|||
|
|||
// Native
|
|||
const { resolve, basename } = require('path') |
|||
|
|||
// Packages
|
|||
const Progress = require('progress') |
|||
const fs = require('fs-extra') |
|||
const bytes = require('bytes') |
|||
const chalk = require('chalk') |
|||
const minimist = require('minimist') |
|||
const ms = require('ms') |
|||
const dotenv = require('dotenv') |
|||
const { eraseLines } = require('ansi-escapes') |
|||
const { write: copy } = require('clipboardy') |
|||
const inquirer = require('inquirer') |
|||
|
|||
// Ours
|
|||
const Logger = require('./legacy/build-logger') |
|||
const Now = require('./legacy/now.js') |
|||
const toHumanPath = require('../../util/humanize-path') |
|||
const { handleError, error } = require('./legacy/error') |
|||
const { fromGit, isRepoPath, gitPathParts } = require('./legacy/git') |
|||
const readMetaData = require('./legacy/read-metadata') |
|||
const checkPath = require('./legacy/check-path') |
|||
const logo = require('../../util/output/logo') |
|||
const cmd = require('../../util/output/cmd') |
|||
const info = require('../../util/output/info') |
|||
const wait = require('../../util/output/wait') |
|||
const NowPlans = require('./legacy/plans') |
|||
const promptBool = require('../../util/input/prompt-bool') |
|||
const promptOptions = require('./legacy/prompt-options') |
|||
const note = require('../../util/output/note') |
|||
|
|||
// minimist parser configuration for the `now sh` deploy entry point:
// which flags take string values, which are booleans, and the short-flag
// aliases (e.g. `-f` for `--force`).
const minimistOpts = {
  string: ['config', 'token', 'name', 'alias', 'session-affinity'],
  boolean: [
    'help',
    'version',
    'debug',
    'force',
    'links',
    'login',
    'no-clipboard',
    'forward-npm',
    'docker',
    'npm',
    'static'
  ],
  alias: {
    env: 'e',
    dotenv: 'E',
    help: 'h',
    config: 'c',
    debug: 'd',
    version: 'v',
    force: 'f',
    token: 't',
    forceSync: 'F',
    links: 'l',
    login: 'L',
    public: 'p',
    'no-clipboard': 'C',
    'forward-npm': 'N',
    'session-affinity': 'S',
    name: 'n',
    alias: 'a'
  }
}
|||
|
|||
// Prints the full top-level usage/help screen for `now` (the `sh`
// provider): cloud and administrative commands, option flags, and usage
// examples. Output formatting comes from chalk.
const help = () => {
  console.log(`
  ${chalk.bold(`${logo()} now`)} [options] <command | path>

  ${chalk.dim('Commands:')}

  ${chalk.dim('Cloud')}

  deploy [path] Performs a deployment ${chalk.bold(
    '(default)'
  )}
  ls | list [app] List deployments
  rm | remove [id] Remove a deployment
  ln | alias [id] [url] Configures aliases for deployments
  domains [name] Manages your domain names
  certs [cmd] Manages your SSL certificates
  secrets [name] Manages your secret environment variables
  dns [name] Manages your DNS records
  logs [url] Displays the logs for a deployment
  scale [args] Scales the instance count of a deployment
  help [cmd] Displays complete help for [cmd]

  ${chalk.dim('Administrative')}

  billing | cc [cmd] Manages your credit cards and billing methods
  upgrade | downgrade [plan] Upgrades or downgrades your plan
  teams [team] Manages your teams
  switch Switches between teams and your account
  login Login into your account or creates a new one
  logout Logout from your account

  ${chalk.dim('Options:')}

  -h, --help Output usage information
  -v, --version Output the version number
  -n, --name Set the name of the deployment
  -c ${chalk.underline('FILE')}, --config=${chalk.underline(
    'FILE'
  )} Config file
  -d, --debug Debug mode [off]
  -f, --force Force a new deployment even if nothing has changed
  -t ${chalk.underline('TOKEN')}, --token=${chalk.underline(
    'TOKEN'
  )} Login token
  -L, --login Configure login
  -l, --links Copy symlinks without resolving their target
  -p, --public Deployment is public (${chalk.dim(
    '`/_src`'
  )} is exposed) [on for oss, off for premium]
  -e, --env Include an env var (e.g.: ${chalk.dim(
    '`-e KEY=value`'
  )}). Can appear many times.
  -E ${chalk.underline('FILE')}, --dotenv=${chalk.underline(
    'FILE'
  )} Include env vars from .env file. Defaults to '.env'
  -C, --no-clipboard Do not attempt to copy URL to clipboard
  -N, --forward-npm Forward login information to install private npm modules
  --session-affinity Session affinity, \`ip\` (default) or \`random\` to control session affinity.

  ${chalk.dim(
    'Enforcable Types (when both package.json and Dockerfile exist):'
  )}

  --npm Node.js application
  --docker Docker container
  --static Static file hosting

  ${chalk.dim('Examples:')}

  ${chalk.gray('–')} Deploys the current directory

  ${chalk.cyan('$ now')}

  ${chalk.gray('–')} Deploys a custom path ${chalk.dim('`/usr/src/project`')}

  ${chalk.cyan('$ now /usr/src/project')}

  ${chalk.gray('–')} Deploys a GitHub repository

  ${chalk.cyan('$ now user/repo#ref')}

  ${chalk.gray('–')} Deploys a GitHub, GitLab or Bitbucket repo using its URL

  ${chalk.cyan('$ now https://gitlab.com/user/repo')}

  ${chalk.gray('–')} Deploys with ENV vars

  ${chalk.cyan(
    '$ now -e NODE_ENV=production -e MYSQL_PASSWORD=@mysql-password'
  )}

  ${chalk.gray('–')} Displays comprehensive help for the subcommand ${chalk.dim(
    '`list`'
  )}

  ${chalk.cyan('$ now help list')}
`)
}
|||
|
|||
// Parsed CLI arguments and the resolved deployment path — assigned in
// `main()` and read throughout this module.
let argv
let path

// Options (populated from `argv` inside `main()`)
let forceNew
let deploymentName
let sessionAffinity
let debug
let clipboard
let forwardNpm
let forceSync
let followSymlinks
let wantsPublic
let apiUrl
let isTTY
let quiet
let alwaysForwardNpm

// If the current deployment is a repo
// (filled with the parsed git URL parts / temp checkout info during sync)
const gitRepo = {}
|||
|
|||
// Reports a fatal deployment error via `handleError` and terminates the
// process with a non-zero exit code. Never returns.
const stopDeployment = msg => {
  handleError(msg)

  // abort the whole CLI run — nothing can proceed after a fatal error
  process.exit(1)
}
|||
|
|||
// Interactively prompts for the value of every environment-variable name
// in `list` and returns the answers keyed by variable name. Aborts the
// deployment (via `stopDeployment`) when any value is left empty.
const envFields = async list => {
  const questions = list.map(field => ({
    name: field,
    message: field
  }))

  // eslint-disable-next-line import/no-unassigned-import
  require('../../lib/util/input/patch-inquirer')

  console.log(
    info('Please enter the values for the following environment variables:')
  )
  const answers = await inquirer.prompt(questions)

  for (const [name, value] of Object.entries(answers)) {
    // an empty answer is fatal — every requested variable needs a value
    if (value === '') {
      stopDeployment(`Enter a value for ${name}`)
    }
  }

  return answers
}
|||
|
|||
// Entry point for the `sh` (now.sh) provider's deploy command: parses CLI
// flags into the module-level option variables, validates the login token,
// and kicks off `sync()` to perform the deployment.
//
// Returns a process exit code (0/1) or whatever `sync` resolves with.
async function main(ctx) {
  argv = minimist(ctx.argv.slice(2), minimistOpts)

  // very ugly hack – this (now-cli's code) expects that `argv._[0]` is the path
  // we should fix this ASAP
  if (argv._[0] === 'sh') {
    argv._.shift()
  }
  if (argv._[0] === 'deploy') {
    argv._.shift()
  }

  // NOTE(review): `path` is a module-level let that nothing has assigned at
  // this point (the CLI target lives in `argv._[0]`), so this branch always
  // falls through to `process.cwd()` on first run — confirm intent.
  if (path) {
    // If path is relative: resolve
    // if path is absolute: clear up strange `/` etc
    path = resolve(process.cwd(), path)
  } else {
    path = process.cwd()
  }

  // Options
  forceNew = argv.force
  deploymentName = argv.name
  sessionAffinity = argv['session-affinity']
  debug = argv.debug
  clipboard = !argv['no-clipboard']
  forwardNpm = argv['forward-npm']
  forceSync = argv.forceSync
  followSymlinks = !argv.links
  wantsPublic = argv.public
  apiUrl = argv.url || 'https://api.zeit.co'
  isTTY = process.stdout.isTTY
  quiet = !isTTY
  if (argv.h || argv.help) {
    help()
    return 0
  }

  let { token } =
    ctx.authConfig.credentials.find(c => c.provider === 'sh') || {}

  if (!token) {
    // node file sh [...]
    // NOTE(review): `argv` is the minimist result object, so `argv[2]` is
    // almost certainly always undefined here — `ctx.argv[2]` looks intended.
    const sh = argv[2] === 'sh'
    const _cmd = `now ${sh ? 'sh ' : ''}login`
    console.log(error(`You're not logged in! Please run ${cmd(_cmd)}`))
    return 1
  }

  const config = ctx.config.sh

  alwaysForwardNpm = config.forwardNpm

  if (argv.config) {
    // TODO enable this
    // cfg.setConfigFile(argv.config)
  }

  try {
    // NOTE(review): `sync` is async but not awaited, so this try/catch can
    // never observe its rejection — confirm whether `await` was intended.
    return sync({ token, config })
  } catch (err) {
    return stopDeployment(err)
  }
}
|||
|
|||
/**
 * Creates the deployment and drives it through upload, plan checks and
 * log streaming.
 *
 * Relies on module-level state initialized by `main` before this runs
 * (`argv`, `path`, `gitRepo`, `deploymentName`, `deploymentType`,
 * `sessionAffinity`, `quiet`, `debug`, `apiUrl`, `isTTY`, `clipboard`,
 * flags like `forceNew`/`forceSync`/`wantsPublic`, …).
 *
 * NOTE(review): the executor is an `async` function and `_resolve` is
 * never called — plain `return` statements inside it (e.g. `return 0`
 * after an aborted deployment) do NOT resolve the outer promise. The
 * promise settles only via `reject`, `process.exit` or whatever
 * `stopDeployment` does — confirm before relying on a resolution value.
 *
 * @param {String} token - auth token for the `sh` provider
 * @param {Object} config - `ctx.config.sh`; `currentTeam` and `user`
 *   are used for display and API scoping
 * @return {Promise} see note above about how/when it settles
 */
async function sync({ token, config: { currentTeam, user } }) {
  return new Promise(async (_resolve, reject) => {
    const start = Date.now()
    const rawPath = argv._[0]

    // Kick off the plan lookup early; awaited much later, right before
    // the OSS-plan confirmation prompt
    const planPromise = new NowPlans({
      apiUrl,
      token,
      debug,
      currentTeam
    }).getCurrent()

    // If `path` doesn't exist locally, the argument may be a git repo
    // reference (e.g. user/project) to fetch and deploy instead
    try {
      await fs.stat(path)
    } catch (err) {
      let repo
      let isValidRepo = false
      try {
        isValidRepo = isRepoPath(rawPath)
      } catch (_err) {
        // NOTE(review): this checks the OUTER `err` (from fs.stat) but
        // reports `_err` — looks like it was meant to be `_err.code`;
        // confirm against `isRepoPath`'s error contract
        if (err.code === 'INVALID_URL') {
          stopDeployment(_err)
        } else {
          reject(_err)
        }
      }

      if (isValidRepo) {
        const gitParts = gitPathParts(rawPath)
        Object.assign(gitRepo, gitParts)

        // Only mention the remote search if it takes noticeably long
        const searchMessage = setTimeout(() => {
          console.log(
            `> Didn't find directory. Searching on ${gitRepo.type}...`
          )
        }, 500)

        try {
          repo = await fromGit(rawPath, debug)
        } catch (_err) {
          // Lookup failure is intentionally swallowed here: `repo` stays
          // undefined and the "no repository named …" branch below reports it
        }

        clearTimeout(searchMessage)
      }

      if (repo) {
        // Tell now which directory to deploy
        path = repo.path

        // Set global variable for deleting tmp dir later
        // once the deployment has finished
        Object.assign(gitRepo, repo)
      } else if (isValidRepo) {
        const gitRef = gitRepo.ref ? `with "${chalk.bold(gitRepo.ref)}" ` : ''
        stopDeployment(
          `There's no repository named "${chalk.bold(
            gitRepo.main
          )}" ${gitRef}on ${gitRepo.type}`
        )
      } else {
        error(`The specified directory "${basename(path)}" doesn't exist.`)
        process.exit(1)
      }
    }

    // Make sure that directory is deployable
    try {
      await checkPath(path)
    } catch (err) {
      error(err)
      return
    }

    if (!quiet) {
      if (gitRepo.main) {
        const gitRef = gitRepo.ref ? ` at "${chalk.bold(gitRepo.ref)}" ` : ''
        console.log(
          `> Deploying ${gitRepo.type} repository "${chalk.bold(
            gitRepo.main
          )}" ${gitRef} under ${chalk.bold(
            (currentTeam && currentTeam.slug) || user.username || user.email
          )}`
        )
      } else {
        console.log(
          `> Deploying ${chalk.bold(toHumanPath(path))} under ${chalk.bold(
            (currentTeam && currentTeam.slug) || user.username || user.email
          )}`
        )
      }
    }

    let deploymentType

    // CLI deployment type explicit overrides
    if (argv.docker) {
      if (debug) {
        console.log(`> [debug] Forcing \`deploymentType\` = \`docker\``)
      }

      deploymentType = 'docker'
    } else if (argv.npm) {
      if (debug) {
        console.log(`> [debug] Forcing \`deploymentType\` = \`npm\``)
      }

      deploymentType = 'npm'
    } else if (argv.static) {
      if (debug) {
        console.log(`> [debug] Forcing \`deploymentType\` = \`static\``)
      }

      deploymentType = 'static'
    }

    // Detect type/name/affinity from the project's manifests; this
    // reassigns the module-level `deploymentName` and `sessionAffinity`
    let meta
    ;({
      meta,
      deploymentName,
      deploymentType,
      sessionAffinity
    } = await readMeta(path, deploymentName, deploymentType, sessionAffinity))
    const nowConfig = meta.nowConfig

    const now = new Now({ apiUrl, token, debug, currentTeam })

    // Resolve the optional dotenv file: CLI flag wins over now-config
    let dotenvConfig
    let dotenvOption

    if (argv.dotenv) {
      dotenvOption = argv.dotenv
    } else if (nowConfig && nowConfig.dotenv) {
      dotenvOption = nowConfig.dotenv
    }

    if (dotenvOption) {
      const dotenvFileName =
        typeof dotenvOption === 'string' ? dotenvOption : '.env'

      // NOTE(review): resolved relative to the CWD, not to `path`
      if (!fs.existsSync(dotenvFileName)) {
        error(`--dotenv flag is set but ${dotenvFileName} file is missing`)
        return process.exit(1)
      }

      const dotenvFile = await fs.readFile(dotenvFileName)
      dotenvConfig = dotenv.parse(dotenvFile)
    }

    let pkgEnv = nowConfig && nowConfig.env
    const argEnv = [].concat(argv.env || [])

    // An array-style `env` in now-config lists key NAMES only; prompt
    // interactively for any not already supplied via `-e key=value`
    if (pkgEnv && Array.isArray(nowConfig.env)) {
      const defined = argEnv.join()
      const askFor = nowConfig.env.filter(item => !defined.includes(`${item}=`))

      pkgEnv = await envFields(askFor)
    }

    // Merge `now.env` from package.json with `-e` arguments
    const envs = [
      ...Object.keys(dotenvConfig || {}).map(k => `${k}=${dotenvConfig[k]}`),
      ...Object.keys(pkgEnv || {}).map(k => `${k}=${pkgEnv[k]}`),
      ...argEnv
    ]

    // Lazily fetched list of the user's secrets, shared by all env lookups
    let secrets
    const findSecret = async uidOrName => {
      if (!secrets) {
        secrets = await now.listSecrets()
      }

      return secrets.filter(secret => {
        return secret.name === uidOrName || secret.uid === uidOrName
      })
    }

    // Validate every env entry and resolve `@secret` references to uids
    const env_ = await Promise.all(
      envs.map(async kv => {
        if (typeof kv !== 'string') {
          error('Env key and value missing')
          return process.exit(1)
        }

        const [key, ...rest] = kv.split('=')
        let val

        if (rest.length > 0) {
          val = rest.join('=')
        }

        if (/[^A-z0-9_]/i.test(key)) {
          error(
            `Invalid ${chalk.dim('-e')} key ${chalk.bold(
              `"${chalk.bold(key)}"`
            )}. Only letters, digits and underscores are allowed.`
          )
          return process.exit(1)
        }

        if (!key) {
          error(`Invalid env option ${chalk.bold(`"${kv}"`)}`)
          return process.exit(1)
        }

        // Bare `-e KEY` inherits the value from the local environment
        if (val === undefined) {
          if (key in process.env) {
            console.log(
              `> Reading ${chalk.bold(
                `"${chalk.bold(key)}"`
              )} from your env (as no value was specified)`
            )
            // Escape value if it begins with @
            val = process.env[key].replace(/^@/, '\\@')
          } else {
            error(
              `No value specified for env ${chalk.bold(
                `"${chalk.bold(key)}"`
              )} and it was not found in your env.`
            )
            return process.exit(1)
          }
        }

        // `@name` references a stored secret; must match exactly one
        if (val[0] === '@') {
          const uidOrName = val.substr(1)
          const _secrets = await findSecret(uidOrName)
          if (_secrets.length === 0) {
            if (uidOrName === '') {
              error(
                `Empty reference provided for env key ${chalk.bold(
                  `"${chalk.bold(key)}"`
                )}`
              )
            } else {
              error(
                `No secret found by uid or name ${chalk.bold(`"${uidOrName}"`)}`
              )
            }
            return process.exit(1)
          } else if (_secrets.length > 1) {
            error(
              `Ambiguous secret ${chalk.bold(
                `"${uidOrName}"`
              )} (matches ${chalk.bold(_secrets.length)} secrets)`
            )
            return process.exit(1)
          }

          val = { uid: _secrets[0].uid }
        }

        // Un-escape a literal leading @ that was escaped above
        return [key, typeof val === 'string' ? val.replace(/^\\@/, '@') : val]
      })
    )

    const env = {}
    env_.filter(v => Boolean(v)).forEach(([key, val]) => {
      if (key in env) {
        console.log(
          note(`Overriding duplicate env key ${chalk.bold(`"${key}"`)}`)
        )
      }

      env[key] = val
    })

    try {
      await now.create(
        path,
        Object.assign(
          {
            env,
            followSymlinks,
            forceNew,
            forceSync,
            forwardNpm: alwaysForwardNpm || forwardNpm,
            quiet,
            wantsPublic,
            sessionAffinity
          },
          meta
        )
      )
    } catch (err) {
      if (debug) {
        console.log(`> [debug] error: ${err}\n${err.stack}`)
      }

      return stopDeployment(err)
    }

    const { url } = now
    const elapsed = ms(new Date() - start)

    // Print the deployment URL (clipboard copy is best-effort only)
    if (isTTY) {
      if (clipboard) {
        try {
          await copy(url)
          console.log(
            `${chalk.cyan('> Ready!')} ${chalk.bold(
              url
            )} (copied to clipboard) [${elapsed}]`
          )
        } catch (err) {
          console.log(
            `${chalk.cyan('> Ready!')} ${chalk.bold(url)} [${elapsed}]`
          )
        }
      } else {
        console.log(`> ${url} [${elapsed}]`)
      }
    } else {
      process.stdout.write(url)
    }

    const startU = new Date()

    // Invoked when the file sync finishes: report, close the agent and
    // hand off to the build-log stream (unless static)
    const complete = ({ syncCount }) => {
      if (!quiet) {
        const elapsedU = ms(new Date() - startU)
        console.log(
          `> Synced ${syncCount} (${bytes(now.syncAmount)}) [${elapsedU}] `
        )
        console.log('> Initializing…')
      }

      // Close http2 agent
      now.close()

      // Show build logs
      if (!quiet) {
        if (deploymentType === 'static') {
          console.log(`${chalk.cyan('> Deployment complete!')}`)
        } else {
          printLogs(now.host, token, currentTeam, user)
        }
      }
    }

    const plan = await planPromise

    // OSS-plan deployments are public: confirm interactively, or require
    // an explicit --public flag in non-TTY runs
    if (plan.id === 'oss' && !wantsPublic) {
      if (isTTY) {
        console.log(
          info(
            `${chalk.bold(
              (currentTeam && `${currentTeam.slug} is`) ||
                `You (${user.username || user.email}) are`
            )} on the OSS plan. Your code and logs will be made ${chalk.bold(
              'public'
            )}.`
          )
        )

        const proceed = await promptBool(
          'Are you sure you want to proceed with the deployment?',
          { trailing: eraseLines(1) }
        )

        if (proceed) {
          console.log(
            note(`You can use ${cmd('now --public')} to skip this prompt`)
          )
        } else {
          // Declined: remove the just-created (but not yet synced) deployment
          const stopSpinner = wait('Canceling deployment')
          await now.remove(now.id, { hard: true })
          stopSpinner()
          console.log(
            info(
              'Deployment aborted. No files were synced.',
              ` You can upgrade by running ${cmd('now upgrade')}.`
            )
          )
          // NOTE(review): returns from the executor without resolving the
          // outer promise (see function-level note)
          return 0
        }
      } else if (!wantsPublic) {
        const msg =
          '\nYou are on the OSS plan. Your code and logs will be made public.' +
          ' If you agree with that, please run again with --public.'
        return stopDeployment(msg)
      }
    }

    // Upload only when the server reported missing/changed bytes
    if (now.syncAmount) {
      if (debug && now.syncFileCount !== now.fileCount) {
        console.log(
          `> [debug] total files ${now.fileCount}, ${now.syncFileCount} changed. `
        )
      }
      const size = bytes(now.syncAmount)
      const syncCount = `${now.syncFileCount} file${now.syncFileCount > 1
        ? 's'
        : ''}`
      const bar = new Progress(
        `> Upload [:bar] :percent :etas (${size}) [${syncCount}]`,
        {
          width: 20,
          complete: '=',
          incomplete: '',
          total: now.syncAmount,
          clear: true
        }
      )

      now.upload()

      now.on('upload', ({ names, data }) => {
        const amount = data.length
        if (debug) {
          console.log(
            `> [debug] Uploaded: ${names.join(' ')} (${bytes(data.length)})`
          )
        }
        bar.tick(amount)
      })

      now.on('complete', () => complete({ syncCount }))

      now.on('error', err => {
        error('Upload failed')
        return stopDeployment(err)
      })
    } else {
      // Nothing to upload — everything was already in the cloud cache
      if (!quiet) {
        console.log(`> Initializing…`)
      }

      // Close http2 agent
      now.close()

      // Show build logs
      if (!quiet) {
        if (deploymentType === 'static') {
          console.log(`${chalk.cyan('> Deployment complete!')}`)
        } else {
          printLogs(now.host, token, currentTeam, user)
        }
      }
    }
  })
}
|||
|
|||
/**
 * Reads deployment metadata (type, name, session affinity) for `_path`,
 * filling in whatever wasn't forced on the command line.
 *
 * When both a `package.json` and a `Dockerfile` exist, interactively
 * asks the user which deployment type to use (TTY only) and retries.
 *
 * @param {String} _path - directory to inspect
 * @param {String} [_deploymentName] - name forced via `--name`, if any
 * @param {String} [deploymentType] - type forced via `--npm`/`--docker`/`--static`
 * @param {String|Boolean} [_sessionAffinity] - `--session-affinity` value
 * @return {Promise<Object>} `{ meta, deploymentName, deploymentType, sessionAffinity }`
 * @throws re-throws any `readMetaData` error other than a TTY-resolvable
 *   `MULTIPLE_MANIFESTS`
 */
async function readMeta(
  _path,
  _deploymentName,
  deploymentType,
  _sessionAffinity
) {
  try {
    const meta = await readMetaData(_path, {
      deploymentType,
      deploymentName: _deploymentName,
      quiet: true,
      sessionAffinity: _sessionAffinity
    })

    // Fall back to the detected type/name when none was forced
    if (!deploymentType) {
      deploymentType = meta.type

      if (debug) {
        console.log(
          `> [debug] Detected \`deploymentType\` = \`${deploymentType}\``
        )
      }
    }

    if (!_deploymentName) {
      _deploymentName = meta.name

      if (debug) {
        console.log(
          `> [debug] Detected \`deploymentName\` = "${_deploymentName}"`
        )
      }
    }

    return {
      meta,
      deploymentName: _deploymentName,
      deploymentType,
      sessionAffinity: _sessionAffinity
    }
  } catch (err) {
    if (isTTY && err.code === 'MULTIPLE_MANIFESTS') {
      if (debug) {
        console.log('> [debug] Multiple manifests found, disambiguating')
      }

      console.log(
        `> Two manifests found. Press [${chalk.bold(
          'n'
        )}] to deploy or re-run with --flag`
      )

      deploymentType = await promptOptions([
        ['npm', `${chalk.bold('package.json')}\t${chalk.gray(' --npm')} `],
        ['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `]
      ])

      if (debug) {
        console.log(
          `> [debug] Selected \`deploymentType\` = "${deploymentType}"`
        )
      }

      // FIX: the recursive call previously dropped `_sessionAffinity`,
      // silently losing the --session-affinity setting whenever the user
      // had to disambiguate between two manifests
      return readMeta(_path, _deploymentName, deploymentType, _sessionAffinity)
    }
    throw err
  }
}
|||
|
|||
/**
 * Streams and prints the build logs for the deployment at `host`,
 * then exits the process when the log stream errors or closes.
 *
 * Uses the module-level `debug`, `quiet` and `gitRepo` state.
 *
 * @param {String} host - deployment host to attach to
 * @param {String} token - auth token for the log stream
 */
function printLogs(host, token) {
  // Log build
  const logger = new Logger(host, token, { debug, quiet })

  // Shared teardown: remove the temporary clone directory, if any
  const removeTmpRepo = () => {
    if (gitRepo && gitRepo.cleanup) {
      // Delete temporary directory that contains repository
      gitRepo.cleanup()

      if (debug) {
        console.log(`> [debug] Removed temporary repo directory`)
      }
    }
  }

  logger.on('error', async err => {
    if (!quiet) {
      const isBuildError = err && err.type === 'BUILD_ERROR'

      if (isBuildError) {
        error(
          `The build step of your project failed. To retry, run ${cmd(
            'now --force'
          )}.`
        )
      } else {
        error('Deployment failed')
      }
    }

    removeTmpRepo()
    process.exit(1)
  })

  logger.on('close', async () => {
    if (!quiet) {
      console.log(`${chalk.cyan('> Deployment complete!')}`)
    }

    removeTmpRepo()
    process.exit(0)
  })
}
|||
|
|||
module.exports = main |
@ -0,0 +1,13 @@ |
|||
// Command table for the `sh` (now.sh) provider.
// Subcommand modules are exposed through lazy getters so that merely
// requiring this index doesn't eagerly load every implementation.
// NOTE(review): 'ls' appears in `subcommands` but has no getter here —
// presumably resolved elsewhere; verify against the dispatcher.
module.exports = {
  title: 'now.sh',
  subcommands: new Set(['help', 'login', 'deploy', 'ls']),
  get deploy() {
    return require('./deploy')
  },
  get help() {
    return require('./help')
  },
  get login() {
    return require('./login')
  }
}
@ -0,0 +1,83 @@ |
|||
// Native
|
|||
const { parse } = require('url') |
|||
const http = require('http') |
|||
const https = require('https') |
|||
|
|||
// Packages
|
|||
const fetch = require('node-fetch') |
|||
|
|||
/**
 * Returns a `fetch` wrapper with a similar
 * API to the browser's, configured with a
 * keep-alive HTTP(S) agent (the core
 * `http`/`https` Agent — not HTTP/2).
 *
 * It encodes `body` automatically as JSON.
 *
 * @param {String} url base URL (protocol + host) requests are made against
 * @return {Function} fetch
 */
|||
|
|||
module.exports = class Agent { |
|||
constructor(url, { tls = true, debug } = {}) { |
|||
this._url = url |
|||
const parsed = parse(url) |
|||
this._protocol = parsed.protocol |
|||
this._debug = debug |
|||
if (tls) { |
|||
this._initAgent() |
|||
} |
|||
} |
|||
|
|||
_initAgent() { |
|||
const module = this._protocol === 'https:' ? https : http |
|||
this._agent = new module.Agent({ |
|||
keepAlive: true, |
|||
keepAliveMsecs: 10000, |
|||
maxSockets: 8 |
|||
}).on('error', err => this._onError(err, this._agent)) |
|||
} |
|||
|
|||
_onError(err, agent) { |
|||
if (this._debug) { |
|||
console.log(`> [debug] agent connection error ${err}\n${err.stack}`) |
|||
} |
|||
if (this._agent === agent) { |
|||
this._agent = null |
|||
} |
|||
} |
|||
|
|||
fetch(path, opts = {}) { |
|||
if (!this._agent) { |
|||
if (this._debug) { |
|||
console.log('> [debug] re-initializing agent') |
|||
} |
|||
this._initAgent() |
|||
} |
|||
|
|||
const { body } = opts |
|||
if (this._agent) { |
|||
opts.agent = this._agent |
|||
} |
|||
|
|||
if (body && typeof body === 'object' && typeof body.pipe !== 'function') { |
|||
opts.headers['Content-Type'] = 'application/json' |
|||
opts.body = JSON.stringify(body) |
|||
} |
|||
|
|||
if (opts.body && typeof body.pipe !== 'function') { |
|||
opts.headers['Content-Length'] = Buffer.byteLength(opts.body) |
|||
} |
|||
|
|||
return fetch(this._url + path, opts) |
|||
} |
|||
|
|||
close() { |
|||
if (this._debug) { |
|||
console.log('> [debug] closing agent') |
|||
} |
|||
|
|||
if (this._agent) { |
|||
this._agent.destroy() |
|||
} |
|||
} |
|||
} |
@ -0,0 +1,138 @@ |
|||
// Native
|
|||
const EventEmitter = require('events') |
|||
|
|||
// Packages
|
|||
const io = require('socket.io-client') |
|||
const chalk = require('chalk') |
|||
|
|||
const { compare, deserialize } = require('./logs') |
|||
|
|||
// Streams deployment state and build-log events for `host` from the
// io.now.sh socket.io endpoint, buffers log lines briefly so they can be
// printed in order, and emits:
//   'error' - deployment not found or state carries an error
//   'close' - backend is ready; all buffered logs were flushed
module.exports = class Logger extends EventEmitter {
  constructor(host, token, { debug = false, quiet = false } = {}) {
    super()
    this.host = host
    this.token = token
    this.debug = debug
    this.quiet = quiet

    // ReadyState
    this.building = false

    this.socket = io(`https://io.now.sh/states?host=${host}&v=2`)
    this.socket.once('error', this.onSocketError.bind(this))
    this.socket.on('auth', this.onAuth.bind(this))
    this.socket.on('state', this.onState.bind(this))
    this.socket.on('logs', this.onLog.bind(this))
    this.socket.on('backend', this.onComplete.bind(this))

    // Log buffer: pending `{ log, timer }` entries awaiting ordered print,
    // plus a set of already-printed log ids to suppress duplicates
    this.buf = []
    this.printed = new Set()
  }

  // Server-initiated auth handshake: reply with our token
  onAuth(callback) {
    if (this.debug) {
      console.log('> [debug] authenticate')
    }
    callback(this.token)
  }

  onState(state) {
    // Console.log(state)
    if (!state.id) {
      console.error('> Deployment not found')
      this.emit('error')
      return
    }

    if (state.error) {
      this.emit('error', state)
      return
    }

    if (state.backend) {
      this.onComplete()
      return
    }

    // A state snapshot may carry log lines of its own
    if (state.logs) {
      state.logs.forEach(this.onLog, this)
    }
  }

  onLog(log) {
    // First log line marks the start of the build
    if (!this.building) {
      if (!this.quiet) {
        console.log('> Building')
      }
      this.building = true
    }

    if (this.quiet) {
      return
    }

    log = deserialize(log)

    // Hold each line for 500ms so lines that arrive out of order can be
    // sorted (via `compare`) before printing; when this line's timer
    // fires, everything sorted at or before it is flushed
    const timer = setTimeout(() => {
      this.buf.sort((a, b) => compare(a.log, b.log))
      const idx = this.buf.findIndex(b => b.log.id === log.id) + 1
      for (const b of this.buf.slice(0, idx)) {
        clearTimeout(b.timer)
        this.printLog(b.log)
      }
      this.buf = this.buf.slice(idx)
    }, 500)

    this.buf.push({ log, timer })
  }

  onComplete() {
    this.socket.disconnect()

    if (this.building) {
      this.building = false
    }

    this.buf.sort((a, b) => compare(a.log, b.log))

    // Flush all buffer
    for (const b of this.buf) {
      clearTimeout(b.timer)
      this.printLog(b.log)
    }
    this.buf = []

    this.emit('close')
  }

  onSocketError(err) {
    if (this.debug) {
      console.log(`> [debug] Socket error ${err}\n${err.stack}`)
    }
  }

  // Print one deserialized log entry, deduplicated by id; stderr lines go
  // to console.error, everything else to console.log
  printLog(log) {
    if (this.printed.has(log.id)) return

    this.printed.add(log.id)

    const data = log.object ? JSON.stringify(log.object) : log.text

    if (log.type === 'command') {
      console.log(`${chalk.gray('>')} ▲ ${data}`)
    } else if (log.type === 'stderr') {
      data.split('\n').forEach(v => {
        if (v.length > 0) {
          console.error(chalk.gray(`> ${v}`))
        }
      })
    } else if (log.type === 'stdout') {
      data.split('\n').forEach(v => {
        if (v.length > 0) {
          console.log(`${chalk.gray('>')} ${v}`)
        }
      })
    }
  }
}
@ -0,0 +1,49 @@ |
|||
// Native
|
|||
const os = require('os') |
|||
const path = require('path') |
|||
|
|||
/**
 * Guards against deploying well-known personal directories.
 *
 * Resolves silently for any other directory (or a falsy `dir`);
 * throws when `dir` is exactly the home, Desktop or Downloads folder.
 *
 * @param {String} dir - absolute path the user asked to deploy
 * @throws {Error} when `dir` is a protected personal directory
 */
const checkPath = async dir => {
  if (!dir) {
    return
  }

  const home = os.homedir()

  // Personal directories we refuse to deploy, keyed by an internal label
  const protectedDirs = {
    home,
    desktop: path.join(home, 'Desktop'),
    downloads: path.join(home, 'Downloads')
  }

  const match = Object.entries(protectedDirs).find(
    ([, protectedPath]) => protectedPath === dir
  )

  if (!match) {
    return
  }

  // Human-readable names for the error message; labels without an entry
  // (e.g. 'desktop') are used verbatim
  const friendlyNames = {
    home: 'user directory',
    downloads: 'downloads directory'
  }

  const [label] = match
  const locationName = friendlyNames[label] || label

  throw new Error(`You're trying to deploy your ${locationName}.`)
}
|||
|
|||
module.exports = checkPath |
@ -0,0 +1,91 @@ |
|||
// Packages
|
|||
const ms = require('ms') |
|||
const chalk = require('chalk') |
|||
|
|||
const error = require('../../../util/output/error') |
|||
const info = require('../../../util/output/info') |
|||
|
|||
/**
 * Prints a human-friendly message for a failed operation.
 *
 * @param {Error|String} err - failure to report; strings are wrapped in Error
 * @param {Object} [options]
 * @param {Boolean} [options.debug=false] - also log the stack trace
 */
function handleError(err, { debug = false } = {}) {
  // Coerce Strings to Error instances
  if (typeof err === 'string') {
    err = new Error(err)
  }

  if (debug) {
    console.log(`> [debug] handling error: ${err.stack}`)
  }

  if (err.status === 403) {
    console.log(
      error(
        'Authentication error. Run `now -L` or `now --login` to log-in again.'
      )
    )
  } else if (err.status === 429) {
    if (err.retryAfter === 'never') {
      console.log(error(err.message))
    } else if (err.retryAfter == null) {
      // FIX: `== null` also matches `undefined` — `responseError` only sets
      // `retryAfter` when a Retry-After header is present, so the previous
      // strict `=== null` check fell through and formatted `ms(NaN)`
      console.log(error('Rate limit exceeded error. Please try later.'))
    } else {
      console.log(
        error(
          'Rate limit exceeded error. Try again in ' +
            ms(err.retryAfter * 1000, { long: true }) +
            ', or upgrade your account by running ' +
            `${chalk.gray('`')}${chalk.cyan('now upgrade')}${chalk.gray('`')}`
        )
      )
    }
  } else if (err.userError) {
    console.log(error(err.message))
  } else if (err.status === 500) {
    console.log(error('Unexpected server error. Please retry.'))
  } else if (err.code === 'USER_ABORT') {
    console.log(info('Aborted'))
  } else {
    console.log(
      error(`Unexpected error. Please try again later. (${err.message})`)
    )
  }
}
|||
|
|||
/**
 * Builds an `Error` describing a failed HTTP response.
 *
 * 4xx responses are treated as user errors and their JSON body is
 * consulted for a message; 429 responses additionally carry the parsed
 * Retry-After header as `err.retryAfter`.
 *
 * @param {Object} res - fetch-style response (`status`, `json()`, `headers.get`)
 * @return {Promise<Error>} error with `status`, `userError` and
 *   (for 429 with a Retry-After header) `retryAfter` attached
 */
async function responseError(res) {
  // Client errors (4xx) are caused by the user; their body may carry a message
  const isClientError = res.status >= 400 && res.status < 500

  let message
  if (isClientError) {
    let body = {}

    try {
      body = await res.json()
    } catch (err) {
      // Unparseable body — fall back to the generic message below
    }

    // Some APIs wrongly return `err` instead of `error`
    message = (body.error || body.err || {}).message
  }

  const err = new Error(message || 'Response error')
  err.status = res.status
  err.userError = isClientError

  // Surface the server-requested backoff for rate-limited requests
  if (res.status === 429) {
    const retryAfter = res.headers.get('Retry-After')

    if (retryAfter) {
      err.retryAfter = parseInt(retryAfter, 10)
    }
  }

  return err
}
|||
|
|||
// `error` (the output formatter required above) is re-exported so that
// consumers render messages with the same styling used in this module
module.exports = {
  handleError,
  responseError,
  error
}
@ -0,0 +1,385 @@ |
|||
// Native
|
|||
const { resolve } = require('path') |
|||
|
|||
// Packages
|
|||
const flatten = require('arr-flatten') |
|||
const unique = require('array-unique') |
|||
const ignore = require('ignore') |
|||
const _glob = require('glob') |
|||
const { stat, readdir, readFile } = require('fs-extra') |
|||
|
|||
// Ours
|
|||
const IGNORED = require('./ignored') |
|||
|
|||
/**
 * Promise wrapper around the callback-style `glob` package.
 *
 * @param {String} pattern - glob pattern to match
 * @param {Object} options - options forwarded to `glob`
 * @return {Promise<Array>} matched file paths
 */
const glob = async function(pattern, options) {
  return new Promise((resolveFiles, rejectWithError) => {
    _glob(pattern, options, (error, files) => {
      if (error) {
        rejectWithError(error)
        return
      }

      resolveFiles(files)
    })
  })
}
|||
|
|||
/** |
|||
* Remove leading `./` from the beginning of ignores |
|||
* because our parser doesn't like them :| |
|||
*/ |
|||
|
|||
// Strip a leading `./` from every line of an ignore file — the ignore
// parser doesn't understand the `./name` form.
const clearRelative = str => str.replace(/(\n|^)\.\//g, '$1')
|||
|
|||
/** |
|||
* Returns the contents of a file if it exists. |
|||
* |
|||
* @return {String} results or `''` |
|||
*/ |
|||
|
|||
/**
 * Reads a file as UTF-8, falling back to `default_` when it can't be read.
 *
 * @param {String} path - file to read
 * @param {*} [default_=''] - value returned on any read failure
 * @return {Promise<String|*>} file contents or the fallback
 */
const maybeRead = async function(path, default_ = '') {
  let contents

  try {
    contents = await readFile(path, 'utf8')
  } catch (err) {
    contents = default_
  }

  return contents
}
|||
|
|||
/** |
|||
* Transform relative paths into absolutes, |
|||
* and maintains absolutes as such. |
|||
* |
|||
* @param {String} maybe relative path |
|||
* @param {String} parent full path |
|||
*/ |
|||
|
|||
/**
 * Makes `path` absolute relative to `parent`; already-absolute paths
 * (leading `/`) are returned untouched.
 *
 * @param {String} path - possibly-relative path
 * @param {String} parent - directory to resolve against
 * @return {String} absolute path
 */
const asAbsolute = function(path, parent) {
  const isAlreadyAbsolute = path.startsWith('/')
  return isAlreadyAbsolute ? path : resolve(parent, path)
}
|||
|
|||
/** |
|||
* Returns a list of files in the given |
|||
* directory that are subject to be |
|||
* synchronized for static deployments. |
|||
* |
|||
* @param {String} full path to directory |
|||
* @param {Object} options: |
|||
* - `limit` {Number|null} byte limit |
|||
* - `debug` {Boolean} warn upon ignore |
|||
* @return {Array} comprehensive list of paths to sync |
|||
*/ |
|||
|
|||
// Collects the file list for a static deployment: globs the `files`
// whitelist from now-config (or the whole directory), then filters out
// everything matched by the built-in IGNORED patterns plus .gitignore.
// NOTE(review): `limit` is accepted and forwarded, but `explode` below
// destructures only `{ accepts, debug }` — the byte limit is never
// enforced here; confirm whether that is intentional.
async function staticFiles(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']
  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files
  const gitIgnore = await maybeRead(resolve(path, '.gitignore'))

  const filter = ignore()
    .add(IGNORED + '\n' + clearRelative(gitIgnore))
    .createFilter()

  // Length of the directory prefix (incl. trailing slash) to strip when
  // converting absolute paths back to ignore-relative ones
  const prefixLength = path.length + 1

  // The package.json `files` whitelist still
  // honors npmignores: https://docs.npmjs.com/files/package.json#files
  // but we don't ignore if the user is explicitly listing files
  // under the now namespace, or using files in combination with gitignore
  const accepts = file => {
    const relativePath = file.substr(prefixLength)

    // The root directory itself is always accepted
    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|||
|
|||
/** |
|||
* Returns a list of files in the given |
|||
* directory that are subject to be |
|||
* synchronized for npm. |
|||
* |
|||
* @param {String} full path to directory |
|||
* @param {String} contents of `package.json` to avoid lookup |
|||
* @param {Object} options: |
|||
* - `limit` {Number|null} byte limit |
|||
* - `debug` {Boolean} warn upon ignore |
|||
* @return {Array} comprehensive list of paths to sync |
|||
*/ |
|||
|
|||
// Collects the file list for an npm deployment: globs the whitelist
// (now-config `files`, package.json `files`, or `now.files`), then
// filters through IGNORED plus .npmignore (or .gitignore when no
// .npmignore exists) — unless an explicit whitelist overrides ignores.
// NOTE(review): `limit` is accepted and forwarded, but `explode` below
// destructures only `{ accepts, debug }` — the byte limit is never
// enforced here; confirm whether that is intentional.
async function npm(
  path,
  pkg = {},
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files || pkg.files || (pkg.now && pkg.now.files)

  // The package.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']
  // Convert all filenames into absolute paths
  const search = Array.prototype.concat.apply(
    [],
    await Promise.all(
      search_.map(file => glob(file, { cwd: path, absolute: true, dot: true }))
    )
  )

  // Compile list of ignored patterns and files
  // .gitignore is only consulted when no .npmignore exists (npm semantics)
  const npmIgnore = await maybeRead(resolve(path, '.npmignore'), null)
  const gitIgnore =
    npmIgnore === null ? await maybeRead(resolve(path, '.gitignore')) : null

  const filter = ignore()
    .add(
      IGNORED + '\n' + clearRelative(npmIgnore === null ? gitIgnore : npmIgnore)
    )
    .createFilter()

  // Length of the directory prefix (incl. trailing slash) to strip when
  // converting absolute paths back to ignore-relative ones
  const prefixLength = path.length + 1

  // The package.json `files` whitelist still
  // honors npmignores: https://docs.npmjs.com/files/package.json#files
  // but we don't ignore if the user is explicitly listing files
  // under the now namespace, or using files in combination with gitignore
  const overrideIgnores =
    (pkg.now && pkg.now.files) ||
    nowConfig.files ||
    (gitIgnore !== null && pkg.files)
  const accepts = overrideIgnores
    ? () => true
    : file => {
        const relativePath = file.substr(prefixLength)

        // The root directory itself is always accepted
        if (relativePath === '') {
          return true
        }

        const accepted = filter(relativePath)
        if (!accepted && debug) {
          console.log('> [debug] ignoring "%s"', file)
        }
        return accepted
      }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, {
    accepts,
    limit,
    debug
  })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('package.json', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|||
|
|||
/** |
|||
* Returns a list of files in the given |
|||
* directory that are subject to be |
|||
* sent to docker as build context. |
|||
* |
|||
* @param {String} full path to directory |
|||
* @param {String} contents of `Dockerfile` |
|||
* @param {Object} options: |
|||
* - `limit` {Number|null} byte limit |
|||
* - `debug` {Boolean} warn upon ignore |
|||
* @return {Array} comprehensive list of paths to sync |
|||
*/ |
|||
|
|||
// Collects the file list sent to Docker as build context: the now-config
// `files` whitelist (or the whole directory), filtered through IGNORED
// plus .dockerignore (falling back to .gitignore when absent).
// NOTE(review): `limit` is accepted and forwarded, but `explode` below
// destructures only `{ accepts, debug }` — the byte limit is never
// enforced here; confirm whether that is intentional.
async function docker(
  path,
  nowConfig = {},
  { limit = null, hasNowJson = false, debug = false } = {}
) {
  const whitelist = nowConfig.files

  // Base search path
  // the now.json `files` whitelist still
  // honors ignores: https://docs.npmjs.com/files/package.json#files
  const search_ = whitelist || ['.']

  // Convert all filenames into absolute paths
  const search = search_.map(file => asAbsolute(file, path))

  // Compile list of ignored patterns and files
  const dockerIgnore = await maybeRead(resolve(path, '.dockerignore'), null)

  const filter = ignore()
    .add(
      IGNORED +
        '\n' +
        clearRelative(
          dockerIgnore === null
            ? await maybeRead(resolve(path, '.gitignore'))
            : dockerIgnore
        )
    )
    .createFilter()

  // Length of the directory prefix (incl. trailing slash) to strip when
  // converting absolute paths back to ignore-relative ones
  const prefixLength = path.length + 1
  const accepts = function(file) {
    const relativePath = file.substr(prefixLength)

    // The root directory itself is always accepted
    if (relativePath === '') {
      return true
    }

    const accepted = filter(relativePath)
    if (!accepted && debug) {
      console.log('> [debug] ignoring "%s"', file)
    }
    return accepted
  }

  // Locate files
  if (debug) {
    console.time(`> [debug] locating files ${path}`)
  }

  const files = await explode(search, { accepts, limit, debug })

  if (debug) {
    console.timeEnd(`> [debug] locating files ${path}`)
  }

  // Always include manifest as npm does not allow ignoring it
  // source: https://docs.npmjs.com/files/package.json#files
  files.push(asAbsolute('Dockerfile', path))

  if (hasNowJson) {
    files.push(asAbsolute('now.json', path))
  }

  // Get files
  return unique(files)
}
|||
|
|||
/**
 * Expands directories into a flat list of file paths.
 * Eg:
 *   in:  ['/a.js', '/b']
 *   out: ['/a.js', '/b/c.js', '/b/d.js']
 *
 * @param {Array} paths - {String} paths to explode
 * @param {Object} options:
 *  - `accepts` {Function} predicate deciding whether a path is kept
 *  - `debug` {Boolean} warn upon ignore
 * @return {Array} of {String}s of full paths
 */

async function explode(paths, { accepts, debug }) {
  const expand = async name => {
    let target = name

    if (!accepts(name)) {
      return null
    }

    let stats
    try {
      stats = await stat(target)
    } catch (err) {
      // The entry may come from `files` without the `.js`
      // extension the user omitted – retry with it appended
      target = `${name}.js`

      try {
        stats = await stat(target)
      } catch (err2) {
        if (debug) {
          console.log('> [debug] ignoring invalid file "%s"', name)
        }
        return null
      }
    }

    if (stats.isDirectory()) {
      const entries = await readdir(name)
      /* eslint-disable no-use-before-define */
      return expandAll(entries.map(entry => asAbsolute(entry, name)))
      /* eslint-enable no-use-before-define */
    }

    if (!stats.isFile()) {
      if (debug) {
        console.log('> [debug] ignoring special file "%s"', name)
      }
      return null
    }

    return target
  }

  const expandAll = entries => Promise.all(entries.map(entry => expand(entry)))

  return flatten(await expandAll(paths)).filter(entry => entry !== null)
}
|||
|
|||
// Public API: per-deployment-type file collectors
module.exports = {
  npm,
  docker,
  staticFiles
}
@ -0,0 +1,221 @@ |
|||
// Native
|
|||
const path = require('path') |
|||
const url = require('url') |
|||
const childProcess = require('child_process') |
|||
|
|||
// Packages
|
|||
const fs = require('fs-extra') |
|||
const download = require('download') |
|||
const tmp = require('tmp-promise') |
|||
const isURL = require('is-url') |
|||
|
|||
// Clones `parts.main` at `parts.ref` from the detected Git host into
// `tmpDir.path`. Resolves with git's stdout, rejects on a failed clone.
// `opts.ssh` switches from an HTTPS to an SSH clone URL.
//
// The options object now defaults to `{}`: callers invoke
// `cloneRepo(parts, tmpDir)` without options, which previously threw
// `TypeError: Cannot destructure property 'ssh' of undefined`.
const cloneRepo = (parts, tmpDir, { ssh } = {}) =>
  new Promise((resolve, reject) => {
    let host

    switch (parts.type) {
      case 'GitLab':
        host = `gitlab.com`
        break
      case 'Bitbucket':
        host = `bitbucket.org`
        break
      default:
        host = `github.com`
    }

    const url = ssh
      ? `git@${host}:${parts.main}`
      : `https://${host}/${parts.main}`

    const ref = parts.ref || (parts.type === 'Bitbucket' ? 'default' : 'master')

    // NOTE(review): `ref` is passed as a positional argument, which
    // `git clone` treats as the target *directory*, not a branch –
    // verify whether `--branch ${ref}` was intended here
    const cmd = `git clone ${url} --single-branch ${ref}`

    childProcess.exec(cmd, { cwd: tmpDir.path }, (err, stdout) => {
      if (err) {
        // Return so we don't also call `resolve()` after rejecting
        return reject(err)
      }

      resolve(stdout)
    })
  })
|||
|
|||
// Renames the single directory git/download produced inside `tmpDir`
// to a stable `owner-repo` name and points `tmpDir.path` at it.
const renameRepoDir = async (pathParts, tmpDir) => {
  const [firstEntry] = await fs.readdir(tmpDir.path)

  const from = path.join(tmpDir.path, firstEntry)
  const to = path.join(tmpDir.path, pathParts.main.replace('/', '-'))

  await fs.rename(from, to)
  tmpDir.path = to

  return tmpDir
}
|||
|
|||
// Maps a lowercase host prefix to the platform's canonical brand name
// (undefined for unknown platforms).
const capitalizePlatform = name =>
  ({
    github: 'GitHub',
    gitlab: 'GitLab',
    bitbucket: 'Bitbucket'
  }[name])
|||
|
|||
// Breaks a full repository URL down into `{ main, ref, type }`, where
// `main` is `owner/repo`, `ref` is a branch/tag/short SHA ('' meaning
// the default branch) and `type` is the platform's display name.
const splittedURL = fullURL => {
  const parsedURL = url.parse(fullURL)
  const segments = parsedURL.path.split('/')

  segments.shift()

  // Set path to repo...
  const main = `${segments[0]}/${segments[1]}`

  // ...and then remove it from the parts
  segments.splice(0, 2)

  // Assign the Git reference
  let ref = segments.length >= 2 ? segments[1] : ''

  // Once we know the ref is a commit, shorten its SHA
  if (segments[0] && (segments[0] === 'commit' || segments[0] === 'commits')) {
    ref = ref.substring(0, 7)
  }

  // We're deploying master by default,
  // so there's no need to indicate it explicitly
  if (ref === 'master') {
    ref = ''
  }

  return {
    main,
    ref,
    type: capitalizePlatform(parsedURL.host.split('.')[0])
  }
}
|||
|
|||
// Normalizes a repo argument (full URL or `owner/repo[#ref]`) into the
// `{ main, ref, type }` shape used by the clone/download helpers.
const gitPathParts = main => {
  if (isURL(main)) {
    return splittedURL(main)
  }

  let repo = main
  let ref = ''

  // `owner/repo#branch` shorthand
  if (repo.split('/')[1].includes('#')) {
    ;[repo, ref] = repo.split('#')
  }

  return {
    main: repo,
    ref,
    type: capitalizePlatform('github')
  }
}
|||
|
|||
// Fetches a repository into a temporary directory: first via `git
// clone` (HTTPS, then SSH), falling back to the platform's archive
// download when git is unavailable. Returns the tmp-dir handle, or
// `false` when the archive download fails.
const downloadRepo = async repoPath => {
  const pathParts = gitPathParts(repoPath)

  const tmpDir = await tmp.dir({
    // We'll remove it manually once deployment is done
    keep: true,
    // Recursively remove directory when calling respective method
    unsafeCleanup: true
  })

  let gitInstalled = true

  try {
    // Pass an explicit options object – `cloneRepo` destructures its
    // third argument and would throw a TypeError on `undefined`,
    // making this HTTPS attempt always fail
    await cloneRepo(pathParts, tmpDir, { ssh: false })
  } catch (err) {
    try {
      await cloneRepo(pathParts, tmpDir, { ssh: true })
    } catch (sshErr) {
      // Neither transport worked – assume git is not usable here
      gitInstalled = false
    }
  }

  if (gitInstalled) {
    return renameRepoDir(pathParts, tmpDir)
  }

  // git is unavailable – download the platform's archive instead
  let url

  switch (pathParts.type) {
    case 'GitLab': {
      const ref = pathParts.ref ? `?ref=${pathParts.ref}` : ''
      url = `https://gitlab.com/${pathParts.main}/repository/archive.tar` + ref
      break
    }
    case 'Bitbucket':
      url = `https://bitbucket.org/${pathParts.main}/get/${pathParts.ref ||
        'default'}.zip`
      break
    default:
      url = `https://api.github.com/repos/${pathParts.main}/tarball/${pathParts.ref}`
  }

  try {
    await download(url, tmpDir.path, {
      extract: true
    })
  } catch (err) {
    tmpDir.cleanup()
    return false
  }

  return renameRepoDir(pathParts, tmpDir)
}
|||
|
|||
// Decides whether `path` looks like a deployable repository reference:
// either a URL on a supported Git host or an `owner/repo` shorthand.
// Throws a user error for URLs on unsupported hosts.
const isRepoPath = path => {
  if (!path) {
    return false
  }

  const allowedHosts = ['github.com', 'gitlab.com', 'bitbucket.org']

  if (isURL(path)) {
    const urlParts = url.parse(path)
    const segments = urlParts.path.split('/').filter(Boolean)

    // A bare host URL (no `owner/repo`) is not a repo path
    if (allowedHosts.includes(urlParts.host) && segments.length >= 2) {
      return true
    }

    const err = new Error(`Host "${urlParts.host}" is unsupported.`)
    err.code = 'INVALID_URL'
    err.userError = true
    throw err
  }

  // `owner/repo`-style shorthand: a slash between non-space characters
  return /[^\s\\]\/[^\s\\]/.test(path)
}
|||
|
|||
// Downloads `path` as a repository, returning the tmp-dir handle or
// `false` when it could not be fetched.
const fromGit = async (path, debug) => {
  try {
    return await downloadRepo(path)
  } catch (err) {
    if (debug) {
      console.log(`Could not download "${path}" repo from GitHub`)
    }
    return false
  }
}
|||
|
|||
// Public API for resolving and fetching Git-hosted deployments
module.exports = {
  gitPathParts,
  isRepoPath,
  fromGit
}
@ -0,0 +1,44 @@ |
|||
// Native
|
|||
const { createHash } = require('crypto') |
|||
|
|||
// Packages
|
|||
const { readFile } = require('fs-extra') |
|||
|
|||
/**
 * Computes hashes for the contents of each file given.
 *
 * Files with identical contents share a single map entry whose
 * `names` array lists every path they were found under.
 *
 * @param {Array} of {String} full paths
 * @return {Map} hex digest -> { names, data }
 */

async function hashes(files) {
  const map = new Map()

  await Promise.all(
    files.map(async name => {
      const data = await readFile(name)

      const h = hash(data)
      const entry = map.get(h)
      if (entry) {
        entry.names.push(name)
      } else {
        // Reuse the digest computed above instead of hashing twice
        map.set(h, { names: [name], data })
      }
    })
  )
  return map
}
|||
|
|||
/**
 * Computes a SHA-1 hash for the given buffer.
 *
 * @param {Buffer} buf - file data
 * @return {String} hex digest
 */

function hash(buf) {
  const sha1 = createHash('sha1')
  sha1.update(buf)
  return sha1.digest('hex')
}
|||
|
|||
module.exports = hashes |
@ -0,0 +1,17 @@ |
|||
// Base `.gitignore` to which we add entries
|
|||
// supplied by the user
|
|||
module.exports = `.hg
|
|||
.git |
|||
.gitmodules |
|||
.svn |
|||
.npmignore |
|||
.dockerignore |
|||
.gitignore |
|||
.*.swp |
|||
.DS_Store |
|||
.wafpicke-* |
|||
.lock-wscript |
|||
npm-debug.log |
|||
config.gypi |
|||
node_modules |
|||
CVS` |
@ -0,0 +1,14 @@ |
|||
exports.compare = function(a, b) { |
|||
return ( |
|||
a.serial.localeCompare(b.serial) || |
|||
// For the case serials are a same value on old logs
|
|||
a.created.getTime() - b.created.getTime() |
|||
) |
|||
} |
|||
|
|||
exports.deserialize = function(log) { |
|||
return Object.assign({}, log, { |
|||
date: new Date(log.date), |
|||
created: new Date(log.created) |
|||
}) |
|||
} |
File diff suppressed because it is too large
@ -0,0 +1,57 @@ |
|||
const ms = require('ms') |
|||
|
|||
const Now = require('./now') |
|||
|
|||
// Extracts the relevant plan info from a `/plan` API response.
//
// Resolves to `{ id, name, until }`:
//  - `id`    plan identifier ('oss' when there is no main subscription)
//  - `name`  human-readable plan name (undefined for 'oss')
//  - `until` humanized time remaining when the subscription is set to
//            cancel at period end, otherwise undefined
async function parsePlan(json) {
  const { subscription } = json
  let id = 'oss'
  let until
  let name

  if (subscription) {
    const mainPlan = subscription.items.data.find(
      d => d.plan.metadata.is_main_plan === '1'
    )

    if (mainPlan) {
      ;({ id, name } = mainPlan.plan)
      if (subscription.cancel_at_period_end) {
        until = ms(
          new Date(subscription.current_period_end * 1000) - new Date(),
          { long: true }
        )
      }
    }
  }

  return { id, name, until }
}
|||
|
|||
module.exports = class Plans extends Now { |
|||
async getCurrent() { |
|||
const res = await this._fetch('/plan') |
|||
const json = await res.json() |
|||
return parsePlan(json) |
|||
} |
|||
|
|||
async set(plan) { |
|||
const res = await this._fetch('/plan', { |
|||
method: 'PUT', |
|||
body: { plan } |
|||
}) |
|||
|
|||
const json = await res.json() |
|||
|
|||
if (res.ok) { |
|||
return parsePlan(json) |
|||
} |
|||
|
|||
const err = new Error(json.error.message) |
|||
err.code = json.error.code |
|||
throw err |
|||
} |
|||
} |
@ -0,0 +1,39 @@ |
|||
// Packages
|
|||
const chalk = require('chalk') |
|||
|
|||
module.exports = promptOptions |
|||
|
|||
// Prints a numbered menu of `opts` (an array of `[value, label]`
// pairs) and resolves with the `value` whose number the user presses.
// Rejects with a `USER_ABORT` error on Ctrl+C; other keystrokes are
// ignored until a valid choice arrives.
function promptOptions(opts) {
  return new Promise((resolve, reject) => {
    opts.forEach(([, text], i) => {
      console.log(`${chalk.gray('>')} [${chalk.bold(i + 1)}] ${text}`)
    })

    const ondata = v => {
      const s = v.toString()

      const cleanup = () => {
        process.stdin.setRawMode(false)
        process.stdin.removeListener('data', ondata)
      }

      // Ctrl + C
      if (s === '\u0003') {
        cleanup()
        const err = new Error('Aborted')
        err.code = 'USER_ABORT'
        return reject(err)
      }

      const choice = Number(s)
      if (opts[choice - 1]) {
        cleanup()
        resolve(opts[choice - 1][0])
      }
    }

    process.stdin.setRawMode(true)
    process.stdin.resume()
    process.stdin.on('data', ondata)
  })
}
@ -0,0 +1,195 @@ |
|||
// Native
|
|||
const { basename, resolve: resolvePath } = require('path') |
|||
|
|||
// Packages
|
|||
const chalk = require('chalk') |
|||
const { readFile } = require('fs-extra') |
|||
const { parse: parseDockerfile } = require('docker-file-parser') |
|||
const determineType = require('deployment-type') |
|||
|
|||
module.exports = readMetaData |
|||
|
|||
/**
 * Reads a deployment's metadata from the project at `path`, merging
 * `package.json`, `now.json` and `Dockerfile` with the CLI-supplied
 * overrides. CLI flags take precedence over config-file values.
 *
 * @param {String} path - project root directory
 * @param {Object} options:
 *  - `deploymentType` {String} 'npm' | 'docker' | 'static' (optional,
 *    auto-detected when omitted)
 *  - `deploymentName` {String} explicit deployment name (optional)
 *  - `sessionAffinity` {String} session affinity setting (optional)
 *  - `quiet` {Boolean} suppress informational logging
 *  - `strict` {Boolean} reject an empty `Dockerfile`
 * @return {Object} { name, description, type, pkg, nowConfig,
 *   hasNowJson, deploymentType, sessionAffinity }
 * @throws user errors for conflicting or ambiguous configuration
 */
async function readMetaData(
  path,
  {
    deploymentType,
    deploymentName,
    sessionAffinity,
    quiet = false,
    strict = true
  }
) {
  let description
  let type = deploymentType
  let name = deploymentName
  let affinity = sessionAffinity

  const pkg = await readJSON(path, 'package.json')
  let nowConfig = await readJSON(path, 'now.json')
  const dockerfile = await readDockerfile(path)

  const hasNowJson = Boolean(nowConfig)

  if (pkg && pkg.now) {
    // If the project has both a `now.json` and `now` Object in the `package.json`
    // file, then fail hard and let the user know that they need to pick one or the
    // other
    if (nowConfig) {
      const err = new Error(
        'You have a `now` configuration field inside `package.json` ' +
          'but configuration is also present in `now.json`! ' +
          "Please ensure there's a single source of configuration by removing one."
      )
      err.userError = true
      throw err
    } else {
      nowConfig = pkg.now
    }
  }

  // We can remove this once the prompt for choosing `--npm` or `--docker` is gone
  // (`nowConfig` aliases `pkg.now` at this point, see above)
  if (pkg && pkg.now && pkg.now.type) {
    type = nowConfig.type
  }

  // The same goes for this
  if (nowConfig && nowConfig.type) {
    type = nowConfig.type
  }

  if (!type) {
    type = await determineType(path)

    // Both `package.json` and `Dockerfile` exist! Prompt the user to pick one.
    // We can remove this soon (details are internal) - also read the comment paragraph above
    if (type === 'docker' && (pkg && dockerfile)) {
      const err = new Error(
        'Ambiguous deployment (`package.json` and `Dockerfile` found). ' +
          'Please supply `--npm` or `--docker` to disambiguate.'
      )

      err.userError = true
      err.code = 'MULTIPLE_MANIFESTS'

      throw err
    }
  }

  if (!name && nowConfig) {
    name = nowConfig.name
  }

  if (!affinity && nowConfig) {
    affinity = nowConfig.sessionAffinity
  }

  if (type === 'npm') {
    if (pkg) {
      // `now.name` in package.json wins over the package's own name
      if (!name && pkg.now && pkg.now.name) {
        name = String(pkg.now.name)
      }

      if (!name && pkg.name) {
        name = String(pkg.name)
      }

      description = pkg.description
    }
  } else if (type === 'docker') {
    if (strict && dockerfile.length <= 0) {
      const err = new Error('No commands found in `Dockerfile`')
      err.userError = true

      throw err
    }

    // Collect `name`/`description` from the Dockerfile's LABEL commands
    const labels = {}

    dockerfile.filter(cmd => cmd.name === 'LABEL').forEach(({ args }) => {
      for (const key in args) {
        if (!{}.hasOwnProperty.call(args, key)) {
          continue
        }

        // Unescape and convert into string
        try {
          labels[key] = args[key]
        } catch (err) {
          const e = new Error(
            `Error parsing value for LABEL ${key} in \`Dockerfile\``
          )

          e.userError = true
          throw e
        }
      }
    })

    if (!name) {
      name = labels.name
    }

    description = labels.description
  } else if (type === 'static') {
    // Do nothing
  } else {
    throw new TypeError(`Unsupported "deploymentType": ${type}`)
  }

  // No name in `package.json` / `now.json`, or "name" label in Dockerfile.
  // Default to the basename of the root dir
  if (!name) {
    name = basename(path)

    if (!quiet && type !== 'static') {
      if (type === 'docker') {
        console.log(
          `> No \`name\` LABEL in \`Dockerfile\`, using ${chalk.bold(name)}`
        )
      } else {
        console.log(
          `> No \`name\` in \`package.json\`, using ${chalk.bold(name)}`
        )
      }
    }
  }

  return {
    name,
    description,
    type,
    pkg,
    nowConfig,
    hasNowJson,

    // XXX: legacy
    deploymentType: type,
    sessionAffinity: affinity
  }
}
|||
|
|||
// Reads and parses `name` as JSON from `path`. Resolves to undefined
// when the file is missing; any other read/parse error is rethrown as
// a user error.
async function readJSON(path, name) {
  try {
    return JSON.parse(await readFile(resolvePath(path, name), 'utf8'))
  } catch (err) {
    // If the file doesn't exist then that's fine; any other error bubbles up
    if (err.code !== 'ENOENT') {
      err.userError = true
      throw err
    }
  }
}
|||
|
|||
// Reads and parses the project's Dockerfile (comments included).
// Resolves to undefined when the file is missing; any other error is
// rethrown as a user error.
async function readDockerfile(path, name = 'Dockerfile') {
  try {
    const contents = await readFile(resolvePath(path, name), 'utf8')
    return parseDockerfile(contents, { includeComments: true })
  } catch (err) {
    // If the file doesn't exist then that's fine; any other error bubbles up
    if (err.code !== 'ENOENT') {
      err.userError = true
      throw err
    }
  }
}
@ -0,0 +1,20 @@ |
|||
// Native
|
|||
const { parse } = require('url') |
|||
|
|||
/**
 * Converts a valid deployment lookup parameter to a hostname.
 * `http://google.com` => google.com
 * google.com => google.com
 */

function toHost(url) {
  const hasScheme = /^https?:\/\//.test(url)
  if (hasScheme) {
    return parse(url).host
  }

  // Strip an optional leading `//` and anything after the host
  // (`a.b.c/` => `a.b.c`)
  return url.replace(/(\/\/)?([^/]+)(.*)/, '$2')
}
|||
|
|||
module.exports = toHost |
@ -0,0 +1,290 @@ |
|||
// node
|
|||
const { stringify: stringifyQuery } = require('querystring') |
|||
const { platform, arch, hostname } = require('os') |
|||
|
|||
// theirs
|
|||
const fetch = require('node-fetch') |
|||
const debug = require('debug')('now:sh:login') |
|||
const promptEmail = require('email-prompt') |
|||
const ms = require('ms') |
|||
const { validate: validateEmail } = require('email-validator') |
|||
|
|||
// ours
|
|||
const { version } = require('./util/pkg') |
|||
const ua = require('./util/ua') |
|||
const error = require('../../util/output/error') |
|||
const aborted = require('../../util/output/aborted') |
|||
const wait = require('../../util/output/wait') |
|||
const highlight = require('../../util/output/highlight') |
|||
const info = require('../../util/output/info') |
|||
const ok = require('../../util/output/ok') |
|||
const cmd = require('../../util/output/cmd') |
|||
const ready = require('../../util/output/ready') |
|||
const param = require('../../util/output/param') |
|||
const eraseLines = require('../../util/output/erase-lines') |
|||
const sleep = require('../../util/sleep') |
|||
const getUser = require('./util/get-user') |
|||
const { |
|||
writeToAuthConfigFile, |
|||
writeToConfigFile |
|||
} = require('../../util/config-files') |
|||
const getNowDir = require('../../get-now-dir') |
|||
const hp = require('../../util/humanize-path') |
|||
|
|||
// POSTs to /now/registration – either creates an account or performs a login
// returns {token, securityCode}
// token: should be used to verify the status of the login process
// securityCode: will be sent to the user in the email body
const getVerificationData = async ({ apiUrl, email }) => {
  const tokenName = `Now CLI ${version} – ${platform()}-${arch()} (${hostname()})`
  const data = JSON.stringify({ email, tokenName })

  debug('POST /now/registration')
  let res
  try {
    res = await fetch(`${apiUrl}/now/registration`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(data),
        'User-Agent': ua
      },
      body: data
    })
  } catch (err) {
    debug('error fetching /now/registration: %O', err.stack)
    throw new Error(
      error(
        // "log in" (verb) – matches the wording of the parse error below
        `An unexpected error occurred while trying to log in: ${err.message}`
      )
    )
  }

  debug('parsing response from POST /now/registration')

  let body
  try {
    body = await res.json()
  } catch (err) {
    debug(
      `error parsing the response from /now/registration as JSON – got %O`,
      err.stack
    )
    throw new Error(
      error(
        `An unexpected error occurred while trying to log in: ${err.message}`
      )
    )
  }

  return body
}
|||
|
|||
// GETs /now/registration/verify to check whether the login identified
// by `verificationToken` has been confirmed by the user.
// Resolves with the final auth token once confirmed.
const verify = async ({ apiUrl, email, verificationToken }) => {
  const query = {
    email,
    token: verificationToken
  }

  debug('GET /now/registration/verify')

  let res
  try {
    res = await fetch(
      `${apiUrl}/now/registration/verify?${stringifyQuery(query)}`,
      {
        headers: { 'User-Agent': ua }
      }
    )
  } catch (err) {
    // `%O` is the printf-style placeholder `debug` understands –
    // `$O` was printed literally and the stack never interpolated
    debug(`error fetching /now/registration/verify: %O`, err.stack)
    throw new Error(
      error(
        `An unexpected error occurred while trying to verify your login: ${err.message}`
      )
    )
  }

  debug('parsing response from GET /now/registration/verify')

  let body
  try {
    body = await res.json()
  } catch (err) {
    debug(
      `error parsing the response from /now/registration/verify: %O`,
      err.stack
    )
    throw new Error(
      error(
        `An unexpected error occurred while trying to verify your login: ${err.message}`
      )
    )
  }

  return body.token
}
|||
|
|||
// Prompts the user for their email address. Throws a user-friendly
// error on Ctrl+C or when stdin cannot enter raw mode; other prompt
// failures fall through and resolve with `undefined` (the caller
// re-validates and re-prompts).
const readEmail = async () => {
  let email
  try {
    email = await promptEmail({ start: info('Enter your email: ') })
  } catch (err) {
    console.log() // \n

    switch (err.message) {
      case 'User abort':
        throw new Error(aborted('No changes made.'))
      case 'stdin lacks setRawMode support':
        throw new Error(
          error(
            `Interactive mode not supported – please run ${cmd(
              'now login you@domain.com'
            )}`
          )
        )
    }
  }
  console.log() // \n
  return email
}
|||
|
|||
// TODO open issues: .co, error messages
|
|||
|
|||
// Interactive login flow: collects and validates an email, requests a
// verification email, polls until the user confirms, then stores the
// token and user details in the local config files.
// Returns the mutated `ctx` on success, or the exit code 1 on failure.
const login = async ctx => {
  const { argv } = ctx
  const apiUrl =
    (ctx.config.sh && ctx.config.sh.apiUrl) || 'https://api.zeit.co'
  let email
  let emailIsValid = false
  let stopSpinner

  // node file sh login [email|help]
  const argvHasSh = argv[2] === 'sh'
  const allowedNumberOfArgs = argvHasSh ? 5 : 4
  if (argv.length > allowedNumberOfArgs) {
    const _cmd = argvHasSh ? 'now sh login' : 'now login'
    console.log(error(`Invalid number of arguments for ${cmd(_cmd)}`))
    console.log(info(`See ${cmd(_cmd + ' help')}`))
    return 1
  }

  const maybeEmail = argv[argv.length - 1]

  // if the last arg is not the command itself, then maybe it's an email
  if (maybeEmail !== 'login') {
    if (!validateEmail(maybeEmail)) {
      // if it's not a valid email, let's just error
      console.log(error(`Invalid email: ${param(maybeEmail)}.`))
      return 1
    }
    // valid email, no need to prompt the user
    email = maybeEmail
  } else {
    do {
      try {
        email = await readEmail()
      } catch (err) {
        let erase = ''
        if (err.message.includes('Aborted')) {
          // no need to keep the prompt if the user `ctrl+c`ed
          erase = eraseLines(2)
        }
        console.log(erase + err.message)
        return 1
      }
      emailIsValid = validateEmail(email)
      if (!emailIsValid) {
        // let's erase the `> Enter email [...]`
        // we can't use `console.log()` because it appends a `\n`
        // we need this check because `email-prompt` doesn't print
        // anything if there's no TTY
        process.stdout.write(eraseLines(2))
      }
    } while (!emailIsValid)
  }

  let verificationToken
  let securityCode
  stopSpinner = wait('Sending you an email')
  try {
    const data = await getVerificationData({ apiUrl, email })
    verificationToken = data.token
    securityCode = data.securityCode
  } catch (err) {
    stopSpinner()
    console.log(err.message)
    return 1
  }

  stopSpinner()

  // prettier-ignore
  console.log(info(
    `We sent an email to ${highlight(email)}. Please follow the steps provided`,
    ` in it and make sure the security code matches ${highlight(securityCode)}.`
  ))

  stopSpinner = wait('Waiting for your confirmation')

  let token

  // Poll once a second until the login has been confirmed
  while (!token) {
    try {
      await sleep(ms('1s'))
      token = await verify({ apiUrl, email, verificationToken })
    } catch (err) {
      if (/invalid json response body/.test(err.message)) {
        // /now/registraton is currently returning plain text in that case
        // we just wait for the user to click on the link
      } else {
        stopSpinner()
        console.log(err.message)
        return 1
      }
    }
  }

  stopSpinner()
  console.log(ok('Email confirmed.'))

  // Fixed typo: was "Feching your personal details"
  stopSpinner = wait('Fetching your personal details')
  let user
  try {
    user = await getUser({ apiUrl, token })
  } catch (err) {
    stopSpinner()
    console.log(err)
    return 1
  }

  const index = ctx.authConfig.credentials.findIndex(c => c.provider === 'sh')
  const obj = { provider: 'sh', token }
  if (index === -1) {
    // wasn't logged in before
    ctx.authConfig.credentials.push(obj)
  } else {
    // let's just replace the existing object
    ctx.authConfig.credentials[index] = obj
  }

  // NOTE: this will override any existing config for `sh`
  ctx.config.sh = { user }

  writeToAuthConfigFile(ctx.authConfig)
  writeToConfigFile(ctx.config)

  stopSpinner()
  console.log(ok('Fetched your personal details.'))

  console.log(
    ready(
      `Authentication token and personal details saved in ${param(
        hp(getNowDir())
      )}`
    )
  )

  return ctx
}
|||
|
|||
module.exports = login |
@ -0,0 +1,54 @@ |
|||
// theirs
|
|||
const fetch = require('node-fetch') |
|||
const debug = require('debug')('now:sh:get-user') |
|||
|
|||
// ours
|
|||
const error = require('../../../util/output/error') |
|||
|
|||
// Fetches the authenticated user's profile from /www/user using the
// given bearer `token`.
const getUser = async ({ apiUrl, token }) => {
  debug('start')
  const url = apiUrl + '/www/user'
  const headers = {
    Authorization: `Bearer ${token}`
  }

  debug('GET /www/user')

  let res
  try {
    res = await fetch(url, { headers })
  } catch (err) {
    // `%O` is the printf-style placeholder `debug` understands –
    // `$O` was printed literally and the stack never interpolated
    debug(`error fetching /www/user: %O`, err.stack)
    throw new Error(
      error(
        `An unexpected error occurred while trying to fetch your personal details: ${err.message}`
      )
    )
  }

  debug('parsing response from GET /www/user')

  let body
  try {
    body = await res.json()
  } catch (err) {
    debug(
      `error parsing the response from /www/user as JSON – got %O`,
      err.stack
    )
    throw new Error(
      error(
        `An unexpected error occurred while trying to fetch your personal details: ${err.message}`
      )
    )
  }

  const { user } = body

  // this is pretty much useless
  delete user.billingChecked

  return user
}
|||
|
|||
module.exports = getUser |
@ -0,0 +1,11 @@ |
|||
const path = require('path')
const pkg = require('../../../../package.json')

// When running as a packaged binary, also expose the package.json that
// ships alongside the executable; `_npmPkg` stays null when it cannot
// be found (e.g. plain `node` execution)
try {
  const distDir = path.dirname(process.execPath)
  pkg._npmPkg = require(path.join(distDir, '../../package.json'))
} catch (err) {
  pkg._npmPkg = null
}

module.exports = pkg
@ -0,0 +1,7 @@ |
|||
// node
const os = require('os')

// ours
const { version } = require('./pkg')

// User-Agent string sent with API requests: CLI version, Node version,
// platform and architecture
module.exports = `now ${version} node-${process.version} ${os.platform()} (${os.arch()})`
@ -0,0 +1,28 @@ |
|||
const resolvers = require('./resolvers')
const resolverNames = Object.keys(resolvers)

// Runs `param` through every registered resolver in order and returns
// the first non-null resolution (or null when nothing matches).
//
// A resolver signals "not mine" by returning null; a throw is treated
// as unexpected and is re-raised with `resolverName` attached so the
// caller can produce a nicer error.
const resolve = async (param, opts) => {
  for (const name of resolverNames) {
    let resolved

    try {
      resolved = await resolvers[name](param, opts)
    } catch (err) {
      err.resolverName = name
      throw err
    }

    if (resolved !== null) {
      return resolved
    }
  }

  return null
}
module.exports = resolve
@ -0,0 +1,13 @@ |
|||
const { exists } = require('fs.promised')
const { resolve } = require('path')

// Resolves `param` against `cwd` when it names an existing filesystem
// path; returns null otherwise so the next resolver can try.
const fsResolver = async (param, { cwd = process.cwd() } = {}) => {
  const candidate = resolve(cwd, param)
  return (await exists(candidate)) ? candidate : null
}

module.exports = fsResolver
@ -0,0 +1,100 @@ |
|||
//@flow
|
|||
const { tmpdir } = require('os') |
|||
const { parse, format } = require('url') |
|||
const fetch = require('node-fetch') |
|||
const tar = require('tar-fs') |
|||
const pipeStreams = require('pipe-streams-to-promise') |
|||
const { mkdir } = require('fs.promised') |
|||
const uid = require('uid-promise') |
|||
const { createGunzip } = require('zlib') |
|||
const { join } = require('path') |
|||
const debug = require('debug')('now:resolvers:github') |
|||
|
|||
// matches a parameter that can be `now`d like zeit/now#master
|
|||
const DEPLOY_PARAM_REGEX = /^([\w-]+)\/([\w-]+)(#\w+)?$/ |
|||
|
|||
// matches whether the parameter could be a github url
|
|||
const GITHUB_TEST_REGEX = /^(https?:\/\/)(www\.)?github\.com/ |
|||
|
|||
// matches a github url pathname like: zeit/now/tree/master
|
|||
const URL_PATHNAME_REGEX = /^\/([\w-]+)\/([\w-]+)(\/tree\/(\w+))?$/ |
|||
|
|||
// Resolves GitHub deploy parameters: either the `user/repo[#ref]`
// shorthand or a full github.com URL. Returns null for anything else.
const resolveGitHub = param => {
  // support simple `user/repo` syntax
  const match = param.match(DEPLOY_PARAM_REGEX)

  if (match) {
    const [, user, repo, tree = 'master'] = match
    return resolveGitHubByURL(`https://github.com/${user}/${repo}/tree/${tree}`)
  }

  if (GITHUB_TEST_REGEX.test(param)) {
    return resolveGitHubByURL(param)
  }

  return null
}
|||
|
|||
const resolveGitHubByURL = async (url: string) => { |
|||
debug('resolving %s by github url', url) |
|||
if (/^https?/.test(url)) { |
|||
const parsed = parse(url) |
|||
if (parsed.hostname === 'github.com') { |
|||
const httpsUrl = |
|||
'https:' === parsed.protocol ? url : format(Object.assign({}, parsed)) |
|||
const res = await fetch(httpsUrl) |
|||
if (res.ok) { |
|||
debug('attempting github clone') |
|||
const { pathname } = parsed |
|||
const match = pathname.match(URL_PATHNAME_REGEX) |
|||
if (match) { |
|||
const [, user, repo, , tree] = match |
|||
const downloadURL = format({ |
|||
protocol: 'https:', |
|||
hostname: 'codeload.github.com', |
|||
pathname: `/${user}/${repo}/tar.gz/${tree}` |
|||
}) |
|||
debug('fetching download url', downloadURL) |
|||
const downloadRes = await fetch(downloadURL, { compress: false }) |
|||
if (downloadRes.ok) { |
|||
const tmpDir = join(tmpdir(), `now-gh-${await uid(20)}`) |
|||
debug('creating tmp dir to extract', tmpDir) |
|||
try { |
|||
await mkdir(tmpDir) |
|||
} catch (err) { |
|||
throw new Error( |
|||
'Error occurred while trying to extract ' + |
|||
`GH tarball to tmp directory ${tmpDir}: ${err.stack}` |
|||
) |
|||
} |
|||
debug('unzipping and untarring stream') |
|||
await pipeStreams([ |
|||
downloadRes.body, |
|||
createGunzip(), |
|||
tar.extract(tmpDir) |
|||
]) |
|||
// instead of stripping a directory upon untar,
|
|||
// we return the full path to the extracted project,
|
|||
// so that now can take advantage of the name
|
|||
return join(tmpDir, `${repo}-${tree}`) |
|||
} else { |
|||
throw new Error( |
|||
'An HTTP error ${res.status} was returned ' + |
|||
`by "${downloadURL}"` |
|||
) |
|||
} |
|||
} else { |
|||
debug('invalid github project url') |
|||
return null |
|||
} |
|||
} else { |
|||
debug('non-200 from github (%d)', res.status) |
|||
return null |
|||
} |
|||
} else { |
|||
debug('skipping non-github hostname') |
|||
return null |
|||
} |
|||
} else { |
|||
return null |
|||
} |
|||
} |
|||
|
|||
module.exports = resolveGitHub |
@ -0,0 +1,4 @@ |
|||
// Registry of parameter resolvers, tried in insertion order
module.exports = {
  fs: require('./fs'),
  github: require('./github')
}
@ -0,0 +1,4 @@ |
|||
# serverless utilities |
|||
|
|||
This directory contains utilities that are useful and reusable
across different FaaS providers.
@ -0,0 +1,7 @@ |
|||
const builders = require('./builders')

// Dispatches the build to the builder registered for `desc.type`
const build = (dir, desc, opts) => builders[desc.type](dir, desc, opts)

module.exports = build
@ -0,0 +1,11 @@ |
|||
module.exports = { |
|||
get nodejs() { |
|||
return require('./nodejs') |
|||
}, |
|||
get static() { |
|||
return require('./static') |
|||
}, |
|||
get go() { |
|||
return require('./go') |
|||
} |
|||
} |
@ -0,0 +1,92 @@ |
|||
const { tmpdir } = require('os') |
|||
const { join } = require('path') |
|||
const { mkdir, stat, link, exists, readdir } = require('fs.promised') |
|||
const uid = require('uid-promise') |
|||
const { exec: exec_ } = require('child_process') |
|||
const { toBuffer } = require('convert-stream') |
|||
const archiver = require('archiver') |
|||
const debug = require('debug')('now:serverless:builders:nodejs') |
|||
const exec = require('util').promisify(exec_) |
|||
|
|||
// Builds a nodejs project directory into a zipped bundle.
//
// Hard-links the project files (minus node_modules and overridden
// names) into a fresh tmp directory, runs a production install when
// `desc.packageJSON` is set, then zips the result.
//
// dir       - path of the project to build
// desc      - project description; `packageJSON` triggers the install
// overrides - map of file name -> contents appended to the zip in
//             place of the on-disk file
//
// Returns a promise for a Buffer containing the zip.
const nodejsBuilder = async (dir, desc, { overrides = {} } = {}) => {
  const files = await readdir(dir)
  const tmpDirName = `now-nodejs-build-${await uid(20)}`
  const targetPath = join(tmpdir(), tmpDirName)

  debug('init nodejs project build stage in', targetPath)
  await mkdir(targetPath)

  // produce hard links of the source files in the target dir
  await Promise.all(
    files
      .filter(name => name !== 'node_modules' && !(name in overrides))
      .map(file => {
        debug('making hard link for %s', file)
        return link(join(dir, file), join(targetPath, file))
      })
  )

  const archive = archiver('zip')

  // trigger an install if needed
  if (desc.packageJSON) {
    // yarn is only used when a yarn.lock is present without a
    // package-lock.json; npm is the default otherwise (same decision
    // table as the original if/else chain, checked in parallel)
    const [hasPackageLock, hasYarnLock] = await Promise.all([
      exists(join(targetPath, 'package-lock.json')),
      exists(join(targetPath, 'yarn.lock'))
    ])
    const buildCommand =
      !hasPackageLock && hasYarnLock ? 'yarn install' : 'npm install'

    try {
      debug('executing %s in %s', buildCommand, targetPath)
      await exec(buildCommand, {
        cwd: targetPath,
        env: Object.assign({}, process.env, {
          // we set this so that we make the installers ignore
          // dev dependencies. in the future, we can add a flag
          // to ignore this behavior, or set different envs
          NODE_ENV: 'production'
        })
      })
    } catch (err) {
      throw new Error(
        `The build command ${buildCommand} failed for ${dir}: ${err.message}`
      )
    }
  } else {
    debug('ignoring build step, no manifests found')
  }

  // start draining the archive into a buffer before entries are added
  const buffer = toBuffer(archive)

  archive.on('warning', err => {
    console.error('Warning while creating zip file', err)
  })

  // archiver requires an 'error' listener: without one, a zip failure
  // is raised as an unhandled 'error' event and crashes the process
  archive.on('error', err => {
    console.error('Error while creating zip file', err)
  })

  for (const name in overrides) {
    archive.append(overrides[name], { name })
  }

  // we read again to get the results of the build process
  const filesToZip = await readdir(targetPath)
  await Promise.all(
    filesToZip.map(async file => {
      const path = join(targetPath, file)
      const stats = await stat(path)
      debug('adding', path)
      return stats.isDirectory()
        ? archive.directory(path, file, { stats })
        : archive.file(path, { name: file, stats })
    })
  )

  archive.finalize()

  // buffer promise
  return buffer
}
|||
|
|||
module.exports = nodejsBuilder |
@ -0,0 +1,29 @@ |
|||
// @flow
|
|||
const { readFileSync } = require('fs') |
|||
const { join } = require('path') |
|||
// load the meta-source template whose symbols are substituted below
const handler = readFileSync(join(__dirname, 'handler.js')).toString()

// symbols to replace in the meta-source
const CMD_SYMBOL = '/*NOW_CMD*/'
const SCRIPT_SYMBOL = '/*NOW_SCRIPT*/'
const REQ_HANDLER_SYMBOL = '/*PROXY_REQUEST_SOURCE*/'

// fail fast at load time if the template and these symbols drift apart
for (const symbol of [CMD_SYMBOL, SCRIPT_SYMBOL, REQ_HANDLER_SYMBOL]) {
  if (handler.indexOf(symbol) < 0) {
    throw new Error('Missing symbol in `handler.js`: ' + symbol)
  }
}
|||
|
|||
const getHandler = ({ cmd, script }, fn: Function) => |
|||
handler |
|||
.replace(CMD_SYMBOL, JSON.stringify(cmd)) |
|||
.replace(SCRIPT_SYMBOL, JSON.stringify(script)) |
|||
.replace(REQ_HANDLER_SYMBOL, fn.toString()) |
|||
|
|||
module.exports = getHandler |
@ -0,0 +1,110 @@ |
|||
// @flow

// Bootstrap wrapper injected into serverless deployments: starts the
// user's http server, waits until it accepts connections, and proxies
// provider invocations to it.

const start = new Date() // process start time, used to log spawn latency

const { createServer } = require('http')
const { createConnection } = require('net')
const { spawn } = require('child_process')
const request = require('http').request

// mutable bootstrap state shared with the exported handler below
let spawned = false // true once the child http server accepts connections
let PORT = null // port picked by findFreePort()
let retriesLeft = 20 // connection attempts before giving up
let buffer = [] // invocations queued until the server is up (nulled after flush)
|||
|
|||
// Replays every invocation queued while the child server was booting,
// then drops the queue.
const flushBuffer = () => {
  for (const args of buffer) {
    proxyRequest(...args)
  }
  buffer = null
}
|||
|
|||
// Finds a free ephemeral port by binding a throwaway http server and
// reading back the OS-assigned port.
// Resolves with the port number; rejects if binding fails.
const findFreePort = () =>
  new Promise((resolve, reject) => {
    const srv = createServer(() => {})
    // `listen` callbacks are NOT error-first (the old `err` parameter
    // was always undefined): bind failures arrive as 'error' events,
    // so reject from there
    srv.on('error', reject)
    srv.listen(() => {
      const { port } = srv.address()
      srv.close()
      resolve(port)
    })
  })
|||
|
|||
// Pick a port, launch the user's server on it, then poll until it
// accepts connections before flushing buffered invocations.
findFreePort().then(
  port => {
    PORT = port

    const env = Object.assign({}, process.env, {
      // we need to add `/nodejs/bin` for GCP functions to
      // work correctly
      PATH: `/nodejs/bin:/usr/local/bin:/usr/bin`,
      PORT
    })

    // the templater replaces these comment symbols with literal
    // values; the `[sym][0]` wrapper keeps the file parseable before
    // substitution ([ /*…*/ ][0] === undefined)
    const NOW_CMD = [
      /*NOW_CMD*/
    ][0]

    const NOW_SCRIPT = [
      /*NOW_SCRIPT*/
    ][0]

    if (NOW_CMD) {
      // run the user-supplied command in a shell with PORT exported
      const cmd = spawn('/usr/bin/env', ['sh', '-c', NOW_CMD], { env: env })
      cmd.on('error', err => {
        throw err
      })
    } else {
      // no command: require the user's script in-process
      process.env.PORT = PORT
      require(`./${NOW_SCRIPT}`)
    }

    // probe the child server; on success mark it live and flush the
    // invocation queue
    const attemptConnect = () => {
      const socket = createConnection(PORT)
      socket.setTimeout(1000)
      socket.on('error', retry)
      socket.on('connect', () => {
        socket.end()
        spawned = true
        flushBuffer()
        console.log('spawn took', new Date() - start)
      })
      socket.on('timeout', () => {
        socket.end()
        retry()
      })
    }

    const retry = () => {
      if (--retriesLeft < 0) {
        throw new Error('Could not establish a connection to the http server')
      }
      // this is close to the bootup time of the most minimal
      // node server that could be created
      setTimeout(attemptConnect, 80)
    }

    retry()
  },
  err => {
    throw err
  }
)
|||
|
|||
exports.handler = (...args) => { |
|||
// hack for lambda. we will refactor the handler injection
|
|||
// per-provider later
|
|||
if (args[1] && args[1].callbackWaitsForEmptyEventLoop) { |
|||
args[1].callbackWaitsForEmptyEventLoop = false |
|||
} |
|||
|
|||
if (spawned) { |
|||
proxyRequest.apply(null, args) |
|||
} else { |
|||
buffer.push(args) |
|||
} |
|||
} |
|||
|
|||
// we will replace the comment with the function with the logic
// to proxy the request for every provider
// (the `[symbol][0]` wrapper keeps the file parseable before the
// templater substitutes the symbol; the injected function is bound
// to `request` and a thunk returning the chosen PORT)
const proxyRequest = [
  /*PROXY_REQUEST_SOURCE*/
][0].bind(null, request, () => PORT)
@ -0,0 +1,43 @@ |
|||
// node
|
|||
const { readFileSync, writeFileSync } = require('fs') |
|||
const { join: joinPath } = require('path') |
|||
|
|||
// ours
|
|||
const getNowDir = require('../get-now-dir') |
|||
|
|||
const NOW_DIR = getNowDir()
const CONFIG_FILE_PATH = joinPath(NOW_DIR, 'config.json')
const AUTH_CONFIG_FILE_PATH = joinPath(NOW_DIR, 'auth.json')

// render JSON with 2-space indentation so the files stay hand-editable
const prettify = obj => JSON.stringify(obj, null, 2)

// reads `CONFIG_FILE_PATH` synchronously
// NOTE(review): despite the original "atomically" wording, plain
// readFileSync/writeFileSync are not atomic (no temp-file + rename)
const readConfigFile = () => readFileSync(CONFIG_FILE_PATH, 'utf8')

// writes whatever's in `stuff` to `CONFIG_FILE_PATH`, pretty-printed
const writeToConfigFile = stuff =>
  writeFileSync(CONFIG_FILE_PATH, prettify(stuff))

// reads `AUTH_CONFIG_FILE_PATH` synchronously
const readAuthConfigFile = () => readFileSync(AUTH_CONFIG_FILE_PATH, 'utf8')

// writes whatever's in `stuff` to `AUTH_CONFIG_FILE_PATH`, pretty-printed
const writeToAuthConfigFile = stuff =>
  writeFileSync(AUTH_CONFIG_FILE_PATH, prettify(stuff))

// path accessors, exposed for messaging and debugging
function getConfigFilePath() {
  return CONFIG_FILE_PATH
}

function getAuthConfigFilePath() {
  return AUTH_CONFIG_FILE_PATH
}

module.exports = {
  readConfigFile,
  writeToConfigFile,
  readAuthConfigFile,
  writeToAuthConfigFile,
  getConfigFilePath,
  getAuthConfigFilePath
}
@ -0,0 +1,31 @@ |
|||
const { write } = require('clipboardy') |
|||
|
|||
const copyToClipboard = async ( |
|||
str: string, |
|||
shouldCopy = 'auto', |
|||
isTTY = process.stdout.isTTY |
|||
): boolean => { |
|||
if (shouldCopy === false) { |
|||
return false |
|||
} |
|||
|
|||
if (shouldCopy === 'auto') { |
|||
if (isTTY) { |
|||
await write(str) |
|||
return true |
|||
} else { |
|||
return false |
|||
} |
|||
} |
|||
|
|||
if (shouldCopy === true) { |
|||
await write(str) |
|||
return true |
|||
} |
|||
|
|||
throw new TypeError( |
|||
'The `copyToClipbard` value in now config has an invalid type' |
|||
) |
|||
} |
|||
|
|||
module.exports = copyToClipboard |
@ -0,0 +1,15 @@ |
|||
// @flow
|
|||
const { homedir } = require('os') |
|||
const { resolve } = require('path') |
|||
|
|||
const humanizePath = (path: string) => { |
|||
const resolved: string = resolve(path) |
|||
const _homedir = homedir() |
|||
if (resolved.indexOf(_homedir) === 0) { |
|||
return `~` + resolved.substr(_homedir.length) |
|||
} else { |
|||
return resolved |
|||
} |
|||
} |
|||
|
|||
module.exports = humanizePath |
@ -0,0 +1,82 @@ |
|||
const inquirer = require('inquirer') |
|||
const stripAnsi = require('strip-ansi') |
|||
|
|||
// eslint-disable-next-line import/no-unassigned-import
|
|||
require('./patch-inquirer') |
|||
|
|||
// Returns the length of the longest line in a (possibly multi-line,
// possibly ANSI-colored) string, measured after stripping ANSI codes.
// (fixed: the original used `.map` purely for side effects with a
// mutable accumulator; reduce + Math.max expresses the intent)
function getLength(string) {
  return string
    .split('\n')
    .reduce((max, line) => Math.max(max, stripAnsi(line).length), 0)
}
|||
|
|||
// Renders an interactive inquirer list prompt and resolves with the
// selected choice's `value` (undefined when the user picks Abort).
module.exports = async function promptList({
  message = 'the question',
  // eslint-disable-line no-unused-vars
  choices = [
    {
      name: 'something\ndescription\ndetails\netc',
      value: 'something unique',
      short: 'generally the first line of `name`'
    }
  ],
  pageSize = 15, // Show 15 lines without scrolling (~4 credit cards)
  separator = true, // Puts a blank separator between each choice
  // Whether the `abort` option will be at the `start` or the `end`
  // can be `false`
  abort = 'end'
}) {
  let biggestLength = 0

  // validate choices and track the widest rendered line so the abort
  // separator rule below can match it
  choices = choices.map(choice => {
    if (choice.name) {
      const length = getLength(choice.name)
      if (length > biggestLength) {
        biggestLength = length
      }
      return choice
    }
    throw new Error('Invalid choice')
  })

  if (separator === true) {
    // interleave a blank separator line before every choice
    choices = choices.reduce(
      (prev, curr) => prev.concat(new inquirer.Separator(' '), curr),
      []
    )
  }

  if (abort) {
    const abortSeparator = new inquirer.Separator('─'.repeat(biggestLength))
    const _abort = {
      name: 'Abort',
      value: undefined
    }
    if (abort === 'start') {
      // keep the leading blank separator first, then Abort + rule
      const blankSep = choices.shift()
      choices.unshift(abortSeparator)
      choices.unshift(_abort)
      choices.unshift(blankSep)
    } else {
      choices.push(abortSeparator)
      choices.push(_abort)
    }
  }

  // unique answer key avoids collisions across repeated prompts
  const nonce = Date.now()
  const answer = await inquirer.prompt({
    name: nonce,
    type: 'list',
    message,
    choices,
    pageSize
  })

  return answer[nonce]
}
@ -0,0 +1,18 @@ |
|||
const inquirer = require('inquirer') |
|||
const chalk = require('chalk') |
|||
|
|||
// Here we patch inquirer to use a `>` instead of the ugly green `?`
|
|||
// Here we patch inquirer to use a `>` instead of the ugly green `?`
function getQuestion() {
  let message = chalk.bold('> ' + this.opt.message) + ' '

  // Append the default if available, and if question isn't answered
  if (this.opt.default != null && this.status !== 'answered') {
    message += chalk.dim('(' + this.opt.default + ') ')
  }

  return message
}

inquirer.prompt.prompts.input.prototype.getQuestion = getQuestion
inquirer.prompt.prompts.list.prototype.getQuestion = getQuestion
@ -0,0 +1,68 @@ |
|||
// theirs
|
|||
const chalk = require('chalk') |
|||
|
|||
// ours
|
|||
const eraseLines = require('../output/erase-lines') |
|||
|
|||
// Prompts the user with `label` for a yes/no answer on a raw-mode
// stdin. Resolves true/false on the yes/no chars, `defaultValue` on
// enter, and rejects with `code: 'USER_ABORT'` on ctrl+c / esc.
module.exports = (
  label,
  {
    defaultValue = false,
    abortSequences = new Set(['\u0003', '\u001b']), // ctrl+c, esc
    resolveChars = new Set(['\r']), // enter
    yesChar = 'y',
    noChar = 'n',
    stdin = process.stdin,
    stdout = process.stdout,
    // if `true`, `eraseLines(1)` will be `stdout.write`d before
    // `resolve`ing or `reject`ing
    clearWhenDone = true
  } = {}
) => {
  return new Promise((resolve, reject) => {
    const isRaw = stdin.isRaw

    stdin.setRawMode(true)
    stdin.resume()

    // undo raw mode and detach the listener before settling
    function restore() {
      if (clearWhenDone) {
        stdout.write(eraseLines(1))
      }
      stdin.setRawMode(isRaw)
      stdin.pause()
      stdin.removeListener('data', onData)
    }

    function onData(buffer) {
      const data = buffer.toString()

      if (data[0].toLowerCase() === yesChar) {
        restore()
        resolve(true)
      } else if (data[0].toLowerCase() === noChar) {
        restore()
        resolve(false)
      } else if (abortSequences.has(data)) {
        restore()
        const e = new Error('User abort')
        e.code = 'USER_ABORT'
        reject(e)
      } else if (resolveChars.has(data[0])) {
        // enter: accept the default answer
        restore()
        resolve(defaultValue)
      } else {
        // ignore extraneous input
      }
    }

    // e.g. `[Y|n]` when the default is yes, `[y|N]` when it is no,
    // plain `[y|n]` when there is no default (null)
    const defaultText =
      defaultValue === null
        ? `[${yesChar}|${noChar}]`
        : defaultValue
          ? `[${chalk.bold(yesChar.toUpperCase())}|${noChar}]`
          : `[${yesChar}|${chalk.bold(noChar.toUpperCase())}]`
    stdout.write(`${label} ${chalk.gray(defaultText)} `)
    stdin.on('data', onData)
  })
}
@ -0,0 +1,103 @@ |
|||
// inspired by https://github.com/zeit/email-prompt
|
|||
|
|||
// theirs
|
|||
const ansiEscapes = require('ansi-escapes') |
|||
const stripAnsi = require('strip-ansi') |
|||
|
|||
// ours
|
|||
const eraseLines = require('../output/erase-lines') |
|||
|
|||
// raw byte sequences we need to recognize on stdin while in raw mode
const ESCAPES = {
  LEFT: '\u001B[D',
  RIGHT: '\u001B[C',
  CTRL_C: '\x03',
  BACKSPACE: '\u0008',
  // NOTE(review): 0x7f is DEL, not ctrl+h (0x08); many terminals
  // send DEL for the backspace key, which is why both are handled
  CTRL_H: '\u007F',
  CARRIAGE: '\r'
}
|||
|
|||
// Single-line text prompt on a raw-mode stdin.
//
// Renders `label`, echoes typed characters, supports left/right caret
// movement and backspace/delete editing. Resolves with the entered
// string on a resolve char (enter); rejects with `code: 'USER_ABORT'`
// on an abort char (ctrl+c).
const textInput = ({
  label = 'Enter some text: ',
  resolveChars = new Set([ESCAPES.CARRIAGE]),
  abortChars = new Set([ESCAPES.CTRL_C]),
  // if `true`, `eraseLines(1)` will be `stdout.write`d before
  // `resolve`ing or `reject`ing
  clearWhenDone = true
}) => {
  return new Promise((resolve, reject) => {
    if (!process.stdin.setRawMode) {
      // Some environments (e.g., cygwin) don't provide a tty.
      // Bail out immediately: raw mode was never enabled so there is
      // nothing to restore. (fixed: the original called `restore()`
      // here — a `const` not yet initialized, so it threw a
      // ReferenceError — and then fell through without returning,
      // crashing on `setRawMode(true)` instead of rejecting)
      const e = new Error('stdin lacks setRawMode support')
      e.userError = true
      return reject(e)
    }

    const isRaw = process.stdin.isRaw

    process.stdin.setRawMode(true)
    process.stdin.resume()
    process.stdout.write(label)

    let input = '' // Whatever the user types
    let caretOffset = 0 // Left/right keys

    const onData = buffer => {
      let data = buffer.toString()

      if (abortChars.has(data)) {
        const e = new Error('User abort')
        e.code = 'USER_ABORT'
        restore()
        return reject(e)
      }

      if (data === ESCAPES.LEFT) {
        if (input.length > Math.abs(caretOffset)) {
          caretOffset--
        }
      } else if (data === ESCAPES.RIGHT) {
        if (caretOffset < 0) {
          caretOffset++
        }
      } else if (data === '\x08' || data === '\x7f') {
        // Delete key needs splicing according to caret position
        input =
          input.substr(0, input.length + caretOffset - 1) +
          input.substr(input.length + caretOffset)
      } else {
        if (resolveChars.has(data)) {
          restore()
          resolve(input)
        }

        // drop ANSI/control sequences instead of echoing them
        if (stripAnsi(data).length !== data.length) {
          data = ''
        }

        // splice the typed text in at the caret position
        input =
          input.substr(0, input.length + caretOffset) +
          stripAnsi(data) +
          input.substr(input.length + caretOffset)
      }

      // re-render the prompt line and reposition the caret
      process.stdout.write(eraseLines(1) + label + input)
      if (caretOffset) {
        process.stdout.write(ansiEscapes.cursorBackward(Math.abs(caretOffset)))
      }
    }

    const restore = () => {
      if (clearWhenDone) {
        process.stdout.write(eraseLines(1))
      }
      process.stdin.setRawMode(isRaw)
      process.stdin.pause()
      process.stdin.removeListener('data', onData)
    }

    process.stdin.on('data', onData)
  })
}

module.exports = textInput
@ -0,0 +1,5 @@ |
|||
const { red } = require('chalk') |
|||
|
|||
// Formats an abort notice: red `> Aborted!` prefix followed by `msg`.
const error = (msg) => {
  return `${red('> Aborted!')} ${msg}`
}
|||
|
|||
module.exports = error |
@ -0,0 +1,7 @@ |
|||
// in some setups now.exe crashes if we use
// the normal tick unicode character :|
const isWindows = process.platform === 'win32'

const chars = {
  tick: isWindows ? '√' : '✔'
}
|||
|
|||
module.exports = chars |
@ -0,0 +1,5 @@ |
|||
const { gray, cyan } = require('chalk') |
|||
|
|||
// Renders `text` as an inline command: gray backticks around cyan text.
const cmd = (text) => {
  const backtick = gray('`')
  return backtick + cyan(text) + backtick
}
|||
|
|||
module.exports = cmd |
@ -0,0 +1,5 @@ |
|||
const { gray } = require('chalk') |
|||
|
|||
// Renders a side-effect line: gray `+ <msg>`.
const effect = (msg) => {
  return gray(`+ ${msg}`)
}
|||
|
|||
module.exports = effect |
@ -0,0 +1,5 @@ |
|||
const ansiEscapes = require('ansi-escapes') |
|||
|
|||
// Returns the ANSI sequence that erases the last `count` lines.
const eraseLines = (count) => {
  return ansiEscapes.eraseLines(count)
}
|||
|
|||
module.exports = eraseLines |
@ -0,0 +1,7 @@ |
|||
const { red } = require('chalk') |
|||
|
|||
// error('woot') === '> Error! woot'
// error('woot', 'yay') === '> Error! woot\nyay'
const error = (...msgs) => {
  return `${red('> Error!')} ${msgs.join('\n')}`
}
|||
|
|||
module.exports = error |
@ -0,0 +1,5 @@ |
|||
const { bold } = require('chalk') |
|||
|
|||
// Emphasizes `text` with bold + underline.
const highlight = (text) => {
  return bold.underline(text)
}
|||
|
|||
module.exports = highlight |
@ -0,0 +1,7 @@ |
|||
const { gray } = require('chalk') |
|||
|
|||
// info('woot') === '> woot' (gray `>`)
// info('woot', 'yay') === '> woot\nyay'
const info = (...msgs) => {
  return `${gray('>')} ${msgs.join('\n')}`
}
|||
|
|||
module.exports = info |
@ -0,0 +1,5 @@ |
|||
const { underline } = require('chalk') |
|||
|
|||
// Renders a link-style (underlined) string.
const highlight = (text) => {
  return underline(text)
}
|||
|
|||
module.exports = highlight |
@ -0,0 +1,17 @@ |
|||
const { gray } = require('chalk') |
|||
|
|||
// listItem('woot') === '- woot'
// listItem('->', 'woot') === '-> woot'
// listItem(1, 'woot') === '1. woot'
const listItem = (n, msg) => {
  if (!msg) {
    // single-argument form: the first arg is the message
    msg = n
    n = '-'
  }
  // numeric markers (and numeric strings) get a trailing dot;
  // the coercing isNaN is intentional here
  const marker = isNaN(n) ? n : `${n}.`
  return `${gray(marker)} ${msg}`
}
|||
|
|||
module.exports = listItem |
@ -0,0 +1,3 @@ |
|||
// The now delta logo; Windows consoles get the plain glyph since the
// mathematical bold delta renders poorly there.
const logo = () => {
  if (process.platform === 'win32') {
    return 'Δ'
  }
  return '𝚫'
}
|||
|
|||
module.exports = logo |
@ -0,0 +1,5 @@ |
|||
const { yellow } = require('chalk') |
|||
|
|||
// Renders a yellow `> NOTE:` prefix followed by the message.
const note = (msg) => {
  return `${yellow('> NOTE:')} ${msg}`
}
|||
|
|||
module.exports = note |
@ -0,0 +1,6 @@ |
|||
const { cyan } = require('chalk') |
|||
const { tick } = require('./chars') |
|||
|
|||
// Renders a success line: cyan tick followed by the message.
const ok = (msg) => {
  const mark = cyan(tick)
  return `${mark} ${msg}`
}
|||
|
|||
module.exports = ok |
@ -0,0 +1,5 @@ |
|||
const { gray, bold } = require('chalk') |
|||
|
|||
// Renders a parameter value: bold text wrapped in gray quotes.
const param = (text) => {
  const quote = gray('"')
  return quote + bold(text) + quote
}
|||
|
|||
module.exports = param |
@ -0,0 +1,5 @@ |
|||
const { cyan } = require('chalk') |
|||
|
|||
// Renders a cyan `> Ready!` prefix followed by the message.
const ready = (msg) => {
  return cyan('> Ready!') + ' ' + msg
}
|||
|
|||
module.exports = ready |
@ -0,0 +1,5 @@ |
|||
const { cyan } = require('chalk') |
|||
|
|||
// Renders a cyan `> Success!` prefix followed by the message.
const success = (msg) => {
  return cyan('> Success!') + ' ' + msg
}
|||
|
|||
module.exports = success |
@ -0,0 +1,16 @@ |
|||
const ora = require('ora') |
|||
const { gray } = require('chalk') |
|||
const eraseLines = require('./erase-lines') |
|||
|
|||
// Starts a gray spinner labeled `msg`; returns a function that stops
// it and erases the spinner line.
const wait = (msg) => {
  const spinner = ora(gray(msg))
  spinner.color = 'gray'
  spinner.start()

  const stop = () => {
    spinner.stop()
    process.stdout.write(eraseLines(1))
  }
  return stop
}
|||
|
|||
module.exports = wait |
@ -0,0 +1,7 @@ |
|||
// Resolves after `ms` milliseconds.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
|||
|
|||
module.exports = sleep |
@ -0,0 +1,7 @@ |
|||
const resolve = require('./src/resolve')

// Smoke test: resolve a GitHub repo shorthand and print the
// directory the project was extracted to.
async function main() {
  try {
    const dir = await resolve('now-examples/wordpress')
    console.log(dir)
  } catch (err) {
    console.error(err)
  }
}

main()
File diff suppressed because it is too large
Loading…
Reference in new issue