diff --git a/bigquery-import/README.md b/bigquery-import/README.md
index dba1285..a8a443f 100644
--- a/bigquery-import/README.md
+++ b/bigquery-import/README.md
@@ -23,7 +23,9 @@ As an example we'll be using a simple logs database structure:
 
 ## Setting up the sample
 
-Add your Project ID to the env.json file.
+Set the `bigquery.datasetname` and `bigquery.tablename` Firebase environment variables to match the dataset name and the table name where you want the logs written. For example:
 
-You'll need to create a Service account's credentials file for your Google Cloud project. Then copy the credential file to `functions/service-accounts.json`
+```bash
+firebase env:set bigquery.datasetname="bar" bigquery.tablename="baz"
+```
 
diff --git a/bigquery-import/env.json b/bigquery-import/env.json
deleted file mode 100644
index 65a6f93..0000000
--- a/bigquery-import/env.json
+++ /dev/null
@@ -1,8 +0,0 @@
-// This file should be placed at the root of you project: the same directory as your firebase.json
-// file, not your functions source directory.
-// TODO: Input your Google Project ID.
-{
-  "google": {
-    "project_id": ""
-  }
-}
diff --git a/bigquery-import/functions/index.js b/bigquery-import/functions/index.js
index 32f412a..f5e7079 100644
--- a/bigquery-import/functions/index.js
+++ b/bigquery-import/functions/index.js
@@ -18,35 +18,31 @@
 const functions = require('firebase-functions');
 const Q = require('q');
 
-// Authenticate to gcloud.
-// TODO: Make sure you add your Google Project ID, Private key and Email into the env.json file.
+// gcloud config. The project ID is read from the GCLOUD_PROJECT environment variable that Cloud Functions sets automatically.
 const gcloudconfig = {
-  projectId: functions.env.get('google.project_id'),
-  credentials: require('./service-accounts.json')
+  projectId: process.env.GCLOUD_PROJECT
 };
 const gcloud = require('gcloud')(gcloudconfig);
 const bigquery = gcloud.bigquery();
-// TODO: Change with your BigQuery dataset name.
-const dataset = bigquery.dataset('');
-// TODO: Change with your BigQuery table name.
-const table = dataset.table('');
 
 /**
  * Writes all logs from the Realtime Database into bigquery.
  */
-exports.addtobigquery = functions.database().path('/logs/$logid').on('value', event => {
+exports.addtobigquery = functions.database().path('/logs/$logid').onWrite(event => {
+  // TODO: Make sure you set the `bigquery.datasetname` environment variable.
+  const dataset = bigquery.dataset(functions.env.bigquery.datasetname);
+  // TODO: Make sure you set the `bigquery.tablename` environment variable.
+  const table = dataset.table(functions.env.bigquery.tablename);
+
   const result = Q.defer();
   table.insert({
-    ID: event.data.key(),
+    ID: event.data.key,
     MESSAGE: event.data.val().message,
     NUMBER: event.data.val().number
   }, (err, insertErr) => {
-    if (err) {
-      console.log(err);
-      result.reject(err);
-    } else if (insertErr) {
-      console.log(insertErr);
-      result.reject(insertErr);
+    if (err || insertErr) {
+      console.error(err || insertErr);
+      result.reject(err || insertErr);
     } else {
       result.resolve();
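
To exercise the patched function end to end, one option is to push a test entry under `/logs` and then check the BigQuery table for the new row. Below is a minimal sketch, not part of the patch itself: it assumes the Firebase client SDK (`npm install firebase`, v3-style API), a database whose security rules permit this write, and a placeholder database URL and file name.

```js
// push-test-log.js — hypothetical helper for testing the addtobigquery function.
const firebase = require('firebase');

firebase.initializeApp({
  // Placeholder: substitute your project's Realtime Database URL.
  databaseURL: 'https://<YOUR-PROJECT-ID>.firebaseio.com'
});

// Each child pushed under /logs triggers addtobigquery, which inserts one
// BigQuery row with ID, MESSAGE, and NUMBER columns.
firebase.database().ref('/logs').push({
  message: 'Hello BigQuery',
  number: 42
}).then(() => {
  console.log('Log entry written; the row should appear in BigQuery shortly.');
  // Exit explicitly: the open database connection keeps the process alive.
  process.exit(0);
});
```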