
Update bigquery-import sample to SDK v0.3.0

Change-Id: I7176e866f033ee3a445f6e1eb68002f93c518e12
Branch: ryanpbrewster-patch-1
Author: Nicolas Garnier, 8 years ago
Parent commit: e1a50e1e04
Changed files:
1. bigquery-import/README.md (6 changes)
2. bigquery-import/env.json (8 changes)
3. bigquery-import/functions/index.js (28 changes)

bigquery-import/README.md (6 changes)

@@ -23,7 +23,9 @@ As an example we'll be using a simple logs database structure:
 ## Setting up the sample
-Add your Project ID to the env.json file.
+Set the `bigquery.datasetName` and `bigquery.tableName` Google Cloud environment variables to match the Dataset name and the Table name where you want the logs written to. For this use:
-You'll need to create a Service account's credentials file for your Google Cloud project. Then copy the credential file to `functions/service-accounts.json`
+```bash
+firebase env:set bigquery.datasetName="bar" bigquery.tableName="baz"
+```
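
For reference, the updated sample reads these values back through `functions.env` inside the function, with lower-cased key names (see the index.js diff below). A minimal sketch of the read-back, assuming the two values were set with the `firebase env:set` command above:

```js
const functions = require('firebase-functions');

// Values set with `firebase env:set bigquery.datasetName=... bigquery.tableName=...`
// are read back lower-cased in this sample, e.g. functions.env.bigquery.datasetname.
const datasetName = functions.env.bigquery.datasetname; // e.g. "bar"
const tableName = functions.env.bigquery.tablename;     // e.g. "baz"
```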

bigquery-import/env.json (8 changes)

@@ -1,8 +0,0 @@
-// This file should be placed at the root of you project: the same directory as your firebase.json
-// file, not your functions source directory.
-// TODO: Input your Google Project ID.
-{
-  "google": {
-    "project_id": "<MY_GOOGLE_PROJECT_ID>"
-  }
-}
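
The env.json file is dropped because the deployed function can pick up the project ID from its runtime environment instead, as the index.js change below does. A minimal sketch of that lookup:

```js
// On Cloud Functions the project ID is exposed as an environment variable,
// so no env.json or service-account credentials file is needed for it.
const projectId = process.env.GCLOUD_PROJECT;
console.log(`Running in project: ${projectId}`);
```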

bigquery-import/functions/index.js (28 changes)

@@ -18,35 +18,31 @@
 const functions = require('firebase-functions');
 const Q = require('q');
 // Authenticate to gcloud.
-// TODO: Make sure you add your Google Project ID, Private key and Email into the env.json file.
+// gcloud config.
 const gcloudconfig = {
-  projectId: functions.env.get('google.project_id'),
-  credentials: require('./service-accounts.json')
+  projectId: process.env.GCLOUD_PROJECT
 };
 const gcloud = require('gcloud')(gcloudconfig);
 const bigquery = gcloud.bigquery();
-// TODO: Change <YOUR-DATASET-NAME> with your BigQuery dataset name.
-const dataset = bigquery.dataset('<YOUR-DATASET-NAME>');
-// TODO: Change <YOUR-TABLE-NAME> with your BigQuery table name.
-const table = dataset.table('<YOUR-TABLE-NAME>');
 /**
  * Writes all logs from the Realtime Database into bigquery.
  */
-exports.addtobigquery = functions.database().path('/logs/$logid').on('value', event => {
+exports.addtobigquery = functions.database().path('/logs/$logid').onWrite(event => {
+  // TODO: Make sure you set the `bigquery.datasetName` environment variable.
+  const dataset = bigquery.dataset(functions.env.bigquery.datasetname);
+  // TODO: Make sure you set the `bigquery.tableName` environment variable.
+  const table = dataset.table(functions.env.bigquery.tablename);
   const result = Q.defer();
   table.insert({
-    ID: event.data.key(),
+    ID: event.data.key,
     MESSAGE: event.data.val().message,
     NUMBER: event.data.val().number
   }, (err, insertErr) => {
-    if (err) {
-      console.log(err);
-      result.reject(err);
-    } else if (insertErr) {
-      console.log(insertErr);
-      result.reject(insertErr);
+    if (err || insertErr) {
+      console.error(err || insertErr);
+      result.reject(err || insertErr);
     } else {
       result.resolve();
     }
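
Pieced together, the updated functions/index.js reads roughly as follows. This is a sketch assembled from the hunk above; the closing lines (ending the insert callback and returning the deferred promise) are not visible in the hunk and are assumed here:

```js
const functions = require('firebase-functions');
const Q = require('q');

// Authenticate to gcloud.
// gcloud config.
const gcloudconfig = {
  projectId: process.env.GCLOUD_PROJECT
};
const gcloud = require('gcloud')(gcloudconfig);
const bigquery = gcloud.bigquery();

/**
 * Writes all logs from the Realtime Database into bigquery.
 */
exports.addtobigquery = functions.database().path('/logs/$logid').onWrite(event => {
  // TODO: Make sure you set the `bigquery.datasetName` environment variable.
  const dataset = bigquery.dataset(functions.env.bigquery.datasetname);
  // TODO: Make sure you set the `bigquery.tableName` environment variable.
  const table = dataset.table(functions.env.bigquery.tablename);
  const result = Q.defer();
  table.insert({
    ID: event.data.key,
    MESSAGE: event.data.val().message,
    NUMBER: event.data.val().number
  }, (err, insertErr) => {
    if (err || insertErr) {
      console.error(err || insertErr);
      result.reject(err || insertErr);
    } else {
      result.resolve();
    }
  });
  // Assumed closing lines: return the deferred promise so the function
  // waits for the BigQuery insert to finish before terminating.
  return result.promise;
});
```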
