Commit 8b96bbb

alexander-fenster authored and Ace Nassri committed
feat!: drop node8 support, support for async iterators (#482)
BREAKING CHANGE: The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM. New feature: methods with pagination now support async iteration.
1 parent 20e83ab commit 8b96bbb
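
For context on the new feature mentioned above: after this change, paginated methods can be consumed with `for await...of`. A minimal sketch, assuming the gax-generated `listDatasetsAsync` variant and a standard `locationPath` helper (the exact method surface here is an assumption, not taken from this diff):

// Sketch only: consuming a paginated method via async iteration,
// per this commit's "async iterators" feature. listDatasetsAsync()
// is the assumed gax-generated async-iterable variant.
const automl = require('@google-cloud/automl');

async function listDatasetsViaAsyncIterator(projectId, computeRegion) {
  const client = new automl.AutoMlClient();
  const request = {
    parent: client.locationPath(projectId, computeRegion),
    filter: 'translationDatasetMetadata:*',
  };
  // Each iteration yields one dataset; page fetches happen behind the scenes.
  for await (const dataset of client.listDatasetsAsync(request)) {
    console.log(`Dataset name: ${dataset.name}`);
  }
}

Note this requires Node.js v10+, which is exactly why Node.js v8 support is dropped in the same commit.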

36 files changed: +249 −247 lines

translate/automl/automlTranslationDataset.js (+64 −64)
@@ -24,7 +24,7 @@

 async function createDataset(projectId) {
   // [START automl_translation_create_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');

   const client = new automl.AutoMlClient();
   const computeRegion = 'us-central1';
@@ -55,25 +55,25 @@ async function createDataset(projectId) {

   // Display the dataset information
   console.log(`Dataset name: ${dataset.name}`);
-  console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+  console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
   console.log(`Dataset display name: ${dataset.displayName}`);
   console.log(`Dataset example count: ${dataset.exampleCount}`);
-  console.log(`Translation dataset specification:`);
+  console.log('Translation dataset specification:');
   console.log(
     `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
   );
   console.log(
     `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
   );
-  console.log(`Dataset create time:`);
+  console.log('Dataset create time:');
   console.log(`\tseconds: ${dataset.createTime.seconds}`);
   console.log(`\tnanos: ${dataset.createTime.nanos}`);
   // [END automl_translation_create_dataset]
 }

 async function listDatasets(projectId, computeRegion, filter) {
   // [START automl_translation_list_datasets]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();

   /**
@@ -97,20 +97,20 @@ async function listDatasets(projectId, computeRegion, filter) {
     console.log('No datasets found!');
     return;
   }
-  console.log(`List of datasets:`);
+  console.log('List of datasets:');
   datasets.forEach(dataset => {
     console.log(`Dataset name: ${dataset.name}`);
-    console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+    console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
     console.log(`Dataset display name: ${dataset.displayName}`);
     console.log(`Dataset example count: ${dataset.exampleCount}`);
-    console.log(`Translation dataset specification:`);
+    console.log('Translation dataset specification:');
     console.log(
       `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
     );
     console.log(
       `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
     );
-    console.log(`Dataset create time:`);
+    console.log('Dataset create time:');
     console.log(`\tseconds: ${dataset.createTime.seconds}`);
     console.log(`\tnanos: ${dataset.createTime.nanos}`);
   });
@@ -119,7 +119,7 @@ async function listDatasets(projectId, computeRegion, filter) {

 async function getDataset(projectId, computeRegion, datasetId) {
   // [START automl_translation_get_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();

   /**
@@ -137,17 +137,17 @@ async function getDataset(projectId, computeRegion, datasetId) {

   // Display the dataset information.
   console.log(`Dataset name: ${dataset.name}`);
-  console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+  console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
   console.log(`Dataset display name: ${dataset.displayName}`);
   console.log(`Dataset example count: ${dataset.exampleCount}`);
-  console.log(`Translation dataset specification:`);
+  console.log('Translation dataset specification:');
   console.log(
     `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
   );
   console.log(
     `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
   );
-  console.log(`Dataset create time:`);
+  console.log('Dataset create time:');
   console.log(`\tseconds: ${dataset.createTime.seconds}`);
   console.log(`\tnanos: ${dataset.createTime.nanos}`);

@@ -156,7 +156,7 @@ async function getDataset(projectId, computeRegion, datasetId) {

 async function importData(projectId, computeRegion, datasetId, path) {
   // [START automl_translation_import_data]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');

   const client = new automl.AutoMlClient();

@@ -172,7 +172,7 @@ async function importData(projectId, computeRegion, datasetId, path) {
   const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);

   // Get the multiple Google Cloud Storage URIs.
-  const inputUris = path.split(`,`);
+  const inputUris = path.split(',');
   const inputConfig = {
     gcsSource: {
       inputUris: inputUris,
@@ -184,19 +184,19 @@ async function importData(projectId, computeRegion, datasetId, path) {
     name: datasetFullId,
     inputConfig: inputConfig,
   });
-  console.log(`Processing import...`);
+  console.log('Processing import...');
   const operationResponses = await operation.promise();
   // The final result of the operation.
   if (operationResponses[2].done === true) {
-    console.log(`Data imported.`);
+    console.log('Data imported.');
   }

   // [END automl_translation_import_data]
 }

 async function deleteDataset(projectId, computeRegion, datasetId) {
   // [START automl_translation_delete_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();

   /**
@@ -213,85 +213,85 @@ async function deleteDataset(projectId, computeRegion, datasetId) {
   const [operations] = await client.deleteDataset({name: datasetFullId});
   const operationResponses = await operations.promise();
   // The final result of the operation.
-  if (operationResponses[2].done === true) console.log(`Dataset deleted.`);
+  if (operationResponses[2].done === true) console.log('Dataset deleted.');

   // [END automl_translation_delete_dataset]
 }

-require(`yargs`)
+require('yargs')
   .demand(1)
   .options({
     computeRegion: {
-      alias: `c`,
-      type: `string`,
+      alias: 'c',
+      type: 'string',
       default: 'us-central1',
       requiresArg: true,
-      description: `region name e.g. "us-central1"`,
+      description: 'region name e.g. "us-central1"',
     },
     datasetName: {
-      alias: `n`,
-      type: `string`,
-      default: `testDataSet`,
+      alias: 'n',
+      type: 'string',
+      default: 'testDataSet',
       requiresArg: true,
-      description: `Name of the Dataset`,
+      description: 'Name of the Dataset',
     },
     datasetId: {
-      alias: `i`,
-      type: `string`,
+      alias: 'i',
+      type: 'string',
       requiresArg: true,
-      description: `Id of the dataset`,
+      description: 'Id of the dataset',
     },
     filter: {
-      alias: `f`,
-      default: `translationDatasetMetadata:*`,
-      type: `string`,
+      alias: 'f',
+      default: 'translationDatasetMetadata:*',
+      type: 'string',
       requiresArg: true,
-      description: `Name of the Dataset to search for`,
+      description: 'Name of the Dataset to search for',
     },
     multilabel: {
-      alias: `m`,
-      type: `string`,
+      alias: 'm',
+      type: 'string',
       default: false,
       requiresArg: true,
       description:
-        `Type of the classification problem, ` +
-        `False - MULTICLASS, True - MULTILABEL.`,
+        'Type of the classification problem, ' +
+        'False - MULTICLASS, True - MULTILABEL.',
     },
     outputUri: {
-      alias: `o`,
-      type: `string`,
+      alias: 'o',
+      type: 'string',
       requiresArg: true,
-      description: `URI (or local path) to export dataset`,
+      description: 'URI (or local path) to export dataset',
     },
     path: {
-      alias: `p`,
-      type: `string`,
+      alias: 'p',
+      type: 'string',
       global: true,
-      default: `gs://nodejs-docs-samples-vcm/en-ja.csv`,
+      default: 'gs://nodejs-docs-samples-vcm/en-ja.csv',
       requiresArg: true,
-      description: `URI or local path to input .csv, or array of .csv paths`,
+      description: 'URI or local path to input .csv, or array of .csv paths',
     },
     projectId: {
-      alias: `z`,
-      type: `number`,
+      alias: 'z',
+      type: 'number',
       default: process.env.GCLOUD_PROJECT,
       requiresArg: true,
-      description: `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`,
+      description: 'The GCLOUD_PROJECT string, e.g. "my-gcloud-project"',
     },
     source: {
-      alias: `s`,
-      type: `string`,
+      alias: 's',
+      type: 'string',
       requiresArg: true,
-      description: `The source language to be translated from`,
+      description: 'The source language to be translated from',
     },
     target: {
-      alias: `t`,
-      type: `string`,
+      alias: 't',
+      type: 'string',
       requiresArg: true,
-      description: `The target language to be translated to`,
+      description: 'The target language to be translated to',
     },
   })
-  .command(`createDataset`, `creates a new Dataset`, {}, opts =>
+  .command('createDataset', 'creates a new Dataset', {}, opts =>
     createDataset(
       opts.projectId,
       opts.computeRegion,
opts.computeRegion,
@@ -300,24 +300,24 @@ require(`yargs`)
       opts.target
     )
   )
-  .command(`list-datasets`, `list all Datasets`, {}, opts =>
+  .command('list-datasets', 'list all Datasets', {}, opts =>
     listDatasets(opts.projectId, opts.computeRegion, opts.filter)
   )
-  .command(`get-dataset`, `Get a Dataset`, {}, opts =>
+  .command('get-dataset', 'Get a Dataset', {}, opts =>
     getDataset(opts.projectId, opts.computeRegion, opts.datasetId)
   )
-  .command(`delete-dataset`, `Delete a dataset`, {}, opts =>
+  .command('delete-dataset', 'Delete a dataset', {}, opts =>
     deleteDataset(opts.projectId, opts.computeRegion, opts.datasetId)
   )
-  .command(`import-data`, `Import labeled items into dataset`, {}, opts =>
+  .command('import-data', 'Import labeled items into dataset', {}, opts =>
     importData(opts.projectId, opts.computeRegion, opts.datasetId, opts.path)
   )
-  .example(`node $0 create-dataset -n "newDataSet" -s "en" -t "ja"`)
-  .example(`node $0 list-datasets -f "translationDatasetMetadata:*"`)
-  .example(`node $0 get-dataset -i "DATASETID"`)
-  .example(`node $0 delete-dataset -i "DATASETID"`)
+  .example('node $0 create-dataset -n "newDataSet" -s "en" -t "ja"')
+  .example('node $0 list-datasets -f "translationDatasetMetadata:*"')
+  .example('node $0 get-dataset -i "DATASETID"')
+  .example('node $0 delete-dataset -i "DATASETID"')
   .example(
-    `node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"`
+    'node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"'
   )
   .wrap(120)
   .recommendCommands()
