-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
+/**
+ * Copyright 2016, Google, Inc.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */

 'use strict';

-// [START setup]
-// By default, the client will authenticate using the service account file
-// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
-// the project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/google-cloud-node/#/docs/google-cloud/latest/guides/authentication
-var BigQuery = require('@google-cloud/bigquery');
-// [END setup]
-
-function createDataset (datasetId, callback) {
-  var bigquery = BigQuery();
-  var dataset = bigquery.dataset(datasetId);
-
-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=create
-  dataset.create(function (err, dataset, apiResponse) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Created dataset: %s', datasetId);
-    return callback(null, dataset, apiResponse);
-  });
+const BigQuery = require('@google-cloud/bigquery');
+
+// [START bigquery_create_dataset]
+function createDataset (datasetId) {
+  // Instantiates a client
+  const bigquery = BigQuery();
+
+  // Creates a new dataset, e.g. "my_new_dataset"
+  return bigquery.createDataset(datasetId)
+    .then((results) => {
+      const dataset = results[0];
+      console.log(`Dataset ${dataset.id} created.`);
+      return dataset;
+    });
 }
+// [END bigquery_create_dataset]

-function deleteDataset (datasetId, callback) {
-  var bigquery = BigQuery();
-  var dataset = bigquery.dataset(datasetId);
+// [START bigquery_delete_dataset]
+function deleteDataset (datasetId) {
+  // Instantiates a client
+  const bigquery = BigQuery();

-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=delete
-  dataset.delete(function (err) {
-    if (err) {
-      return callback(err);
-    }
+  // References an existing dataset, e.g. "my_dataset"
+  const dataset = bigquery.dataset(datasetId);

-    console.log('Deleted dataset: %s', datasetId);
-    return callback(null);
-  });
+  // Deletes the dataset
+  return dataset.delete()
+    .then(() => {
+      console.log(`Dataset ${dataset.id} deleted.`);
+    });
 }
+// [END bigquery_delete_dataset]

-function listDatasets (projectId, callback) {
-  var bigquery = BigQuery({
+// [START bigquery_list_datasets]
+function listDatasets (projectId) {
+  // Instantiates a client
+  const bigquery = BigQuery({
     projectId: projectId
   });

-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=getDatasets
-  bigquery.getDatasets(function (err, datasets) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Found %d dataset(s)!', datasets.length);
-    return callback(null, datasets);
-  });
+  // Lists all datasets in the specified project
+  return bigquery.getDatasets()
+    .then((results) => {
+      const datasets = results[0];
+      console.log('Datasets:');
+      datasets.forEach((dataset) => console.log(dataset.id));
+      return datasets;
+    });
 }
+// [END bigquery_list_datasets]

-// [START get_dataset_size]
-// Control-flow helper library
-var async = require('async');
-
-function getDatasetSize (datasetId, projectId, callback) {
-  // Instantiate a bigquery client
-  var bigquery = BigQuery({
+// [START bigquery_get_dataset_size]
+function getDatasetSize (datasetId, projectId) {
+  // Instantiate a client
+  const bigquery = BigQuery({
     projectId: projectId
   });
-  var dataset = bigquery.dataset(datasetId);
-
-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=getTables
-  dataset.getTables(function (err, tables) {
-    if (err) {
-      return callback(err);
-    }
-
-    return async.map(tables, function (table, cb) {
-      // Fetch more detailed info for each table
-      // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=get
-      table.get(function (err, tableInfo) {
-        if (err) {
-          return cb(err);
-        }
-        // Return numBytes converted to Megabytes
-        var numBytes = tableInfo.metadata.numBytes;
-        return cb(null, (parseInt(numBytes, 10) / 1000) / 1000);
-      });
-    }, function (err, sizes) {
-      if (err) {
-        return callback(err);
-      }
-      var sum = sizes.reduce(function (cur, prev) {
-        return cur + prev;
-      }, 0);
-
-      console.log('Size of %s: %d MB', datasetId, sum);
-      return callback(null, sum);
+
+  // References an existing dataset, e.g. "my_dataset"
+  const dataset = bigquery.dataset(datasetId);
+
+  // Lists all tables in the dataset
+  return dataset.getTables()
+    .then((results) => results[0])
+    // Retrieve the metadata for each table
+    .then((tables) => Promise.all(tables.map((table) => table.get())))
+    .then((results) => results.map((result) => result[0]))
+    // Select the size of each table
+    .then((tables) => tables.map((table) => (parseInt(table.metadata.numBytes, 10) / 1000) / 1000))
+    // Sum up the sizes
+    .then((sizes) => sizes.reduce((cur, prev) => cur + prev, 0))
+    // Print and return the size
+    .then((sum) => {
+      console.log(`Size of ${dataset.id}: ${sum} MB`);
+      return sum;
     });
-  });
 }
-// [END get_dataset_size]
+// [END bigquery_get_dataset_size]

 // The command-line program
-var cli = require('yargs');
-var makeHandler = require('../utils').makeHandler;
+const cli = require(`yargs`);

-var program = module.exports = {
+const program = module.exports = {
   createDataset: createDataset,
   deleteDataset: deleteDataset,
   listDatasets: listDatasets,
   getDatasetSize: getDatasetSize,
-  main: function (args) {
+  main: (args) => {
     // Run the command-line program
     cli.help().strict().parse(args).argv;
   }
 };

 cli
   .demand(1)
-  .command('create <datasetId>', 'Create a new dataset with the specified ID.', {}, function (options) {
-    program.createDataset(options.datasetId, makeHandler());
-  })
-  .command('delete <datasetId>', 'Delete the dataset with the specified ID.', {}, function (options) {
-    program.deleteDataset(options.datasetId, makeHandler());
+  .command(`create <datasetId>`, `Creates a new dataset.`, {}, (opts) => {
+    program.createDataset(opts.datasetId);
   })
-  .command('list', 'List datasets in the specified project.', {}, function (options) {
-    program.listDatasets(options.projectId, makeHandler(true, 'id'));
+  .command(`delete <datasetId>`, `Deletes a dataset.`, {}, (opts) => {
+    program.deleteDataset(opts.datasetId);
   })
-  .command('size <datasetId>', 'Calculate the size of the specified dataset.', {}, function (options) {
-    program.getDatasetSize(options.datasetId, options.projectId, makeHandler());
+  .command(`list [projectId]`, `Lists all datasets in the specified project or the current project.`, {}, (opts) => {
+    program.listDatasets(opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .option('projectId', {
-    alias: 'p',
-    requiresArg: true,
-    type: 'string',
-    default: process.env.GCLOUD_PROJECT,
-    description: 'Optionally specify the project ID to use.',
-    global: true
+  .command(`size <datasetId> [projectId]`, `Calculates the size of a dataset.`, {}, (opts) => {
+    program.getDatasetSize(opts.datasetId, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .example('node $0 create my_dataset', 'Create a new dataset with the ID "my_dataset".')
-  .example('node $0 delete my_dataset', 'Delete a dataset identified as "my_dataset".')
-  .example('node $0 list', 'List datasets.')
-  .example('node $0 list -p bigquery-public-data', 'List datasets in the "bigquery-public-data" project.')
-  .example('node $0 size my_dataset', 'Calculate the size of "my_dataset".')
-  .example('node $0 size hacker_news -p bigquery-public-data', 'Calculate the size of "bigquery-public-data:hacker_news".')
+  .example(`node $0 create my_dataset`, `Creates a new dataset named "my_dataset".`)
+  .example(`node $0 delete my_dataset`, `Deletes a dataset named "my_dataset".`)
+  .example(`node $0 list`, `Lists all datasets in the current project.`)
+  .example(`node $0 list bigquery-public-data`, `Lists all datasets in the "bigquery-public-data" project.`)
+  .example(`node $0 size my_dataset`, `Calculates the size of "my_dataset" in the current project.`)
+  .example(`node $0 size hacker_news bigquery-public-data`, `Calculates the size of "bigquery-public-data:hacker_news".`)
   .wrap(120)
   .recommendCommands()
-  .epilogue('For more information, see https://cloud.google.com/bigquery/docs');
+  .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`);

 if (module === require.main) {
   program.main(process.argv.slice(2));
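A minimal usage sketch (not part of the commit) showing how the new promise-based helpers chain together; the ./datasets.js path and the my_new_dataset name are assumptions for illustration:

// Usage sketch: exercises the promise-returning helpers end to end.
// Assumes GOOGLE_APPLICATION_CREDENTIALS and GCLOUD_PROJECT are set,
// and that the file above is saved as ./datasets.js (path assumed).
const program = require('./datasets');

program.createDataset('my_new_dataset')
  .then(() => program.getDatasetSize('my_new_dataset', process.env.GCLOUD_PROJECT))
  .then(() => program.deleteDataset('my_new_dataset'))
  .catch(console.error);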