@@ -24,7 +24,7 @@
 
 async function createDataset(projectId) {
   // [START automl_translation_create_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
 
   const client = new automl.AutoMlClient();
   const computeRegion = 'us-central1';
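Note: the change in this and every following hunk is mechanical: backtick-delimited template literals that contain no ${...} interpolation are replaced with plain single-quoted strings. The two forms are equivalent at runtime, so behavior is unchanged. An illustrative sketch (hypothetical values, not from this file):

    const pkg = '@google-cloud/automl'; // no interpolation: a plain string literal suffices
    const parent = `projects/${projectId}/locations/us-central1`; // interpolation: backticks required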
@@ -55,25 +55,25 @@ async function createDataset(projectId) {
 
   // Display the dataset information
   console.log(`Dataset name: ${dataset.name}`);
-  console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+  console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
   console.log(`Dataset display name: ${dataset.displayName}`);
   console.log(`Dataset example count: ${dataset.exampleCount}`);
-  console.log(`Translation dataset specification:`);
+  console.log('Translation dataset specification:');
   console.log(
     `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
   );
   console.log(
     `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
   );
-  console.log(`Dataset create time:`);
+  console.log('Dataset create time:');
   console.log(`\tseconds: ${dataset.createTime.seconds}`);
   console.log(`\tnanos: ${dataset.createTime.nanos}`);
   // [END automl_translation_create_dataset]
 }
 
 async function listDatasets(projectId, computeRegion, filter) {
   // [START automl_translation_list_datasets]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();
 
   /**
@@ -97,20 +97,20 @@ async function listDatasets(projectId, computeRegion, filter) {
     console.log('No datasets found!');
     return;
   }
-  console.log(`List of datasets:`);
+  console.log('List of datasets:');
   datasets.forEach(dataset => {
     console.log(`Dataset name: ${dataset.name}`);
-    console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+    console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
     console.log(`Dataset display name: ${dataset.displayName}`);
     console.log(`Dataset example count: ${dataset.exampleCount}`);
-    console.log(`Translation dataset specification:`);
+    console.log('Translation dataset specification:');
     console.log(
       `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
     );
     console.log(
       `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
     );
-    console.log(`Dataset create time:`);
+    console.log('Dataset create time:');
     console.log(`\tseconds: ${dataset.createTime.seconds}`);
     console.log(`\tnanos: ${dataset.createTime.nanos}`);
   });
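Note: the "Dataset id" lines keep .pop(-1) from the original code. Array.prototype.pop() takes no arguments, so the -1 is silently ignored and the call simply removes and returns the last path segment. A clearer equivalent, assuming the usual AutoML resource-name shape:

    // 'projects/1234/locations/us-central1/datasets/TRL123' -> 'TRL123'
    const datasetId = dataset.name.split('/').pop();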
@@ -119,7 +119,7 @@ async function listDatasets(projectId, computeRegion, filter) {
 
 async function getDataset(projectId, computeRegion, datasetId) {
   // [START automl_translation_get_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();
 
   /**
@@ -137,17 +137,17 @@ async function getDataset(projectId, computeRegion, datasetId) {
 
   // Display the dataset information.
   console.log(`Dataset name: ${dataset.name}`);
-  console.log(`Dataset id: ${dataset.name.split(`/`).pop(-1)}`);
+  console.log(`Dataset id: ${dataset.name.split('/').pop(-1)}`);
   console.log(`Dataset display name: ${dataset.displayName}`);
   console.log(`Dataset example count: ${dataset.exampleCount}`);
-  console.log(`Translation dataset specification:`);
+  console.log('Translation dataset specification:');
   console.log(
     `\tSource language code: ${dataset.translationDatasetMetadata.sourceLanguageCode}`
   );
   console.log(
     `\tTarget language code: ${dataset.translationDatasetMetadata.targetLanguageCode}`
   );
-  console.log(`Dataset create time:`);
+  console.log('Dataset create time:');
   console.log(`\tseconds: ${dataset.createTime.seconds}`);
   console.log(`\tnanos: ${dataset.createTime.nanos}`);
 
@@ -156,7 +156,7 @@ async function getDataset(projectId, computeRegion, datasetId) {
 
 async function importData(projectId, computeRegion, datasetId, path) {
   // [START automl_translation_import_data]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
 
   const client = new automl.AutoMlClient();
 
@@ -172,7 +172,7 @@ async function importData(projectId, computeRegion, datasetId, path) {
   const datasetFullId = client.datasetPath(projectId, computeRegion, datasetId);
 
   // Get the multiple Google Cloud Storage URIs.
-  const inputUris = path.split(`,`);
+  const inputUris = path.split(',');
   const inputConfig = {
     gcsSource: {
       inputUris: inputUris,
@@ -184,19 +184,19 @@ async function importData(projectId, computeRegion, datasetId, path) {
     name: datasetFullId,
     inputConfig: inputConfig,
   });
-  console.log(`Processing import...`);
+  console.log('Processing import...');
   const operationResponses = await operation.promise();
   // The final result of the operation.
   if (operationResponses[2].done === true) {
-    console.log(`Data imported.`);
+    console.log('Data imported.');
   }
 
   // [END automl_translation_import_data]
 }
 
 async function deleteDataset(projectId, computeRegion, datasetId) {
   // [START automl_translation_delete_dataset]
-  const automl = require(`@google-cloud/automl`);
+  const automl = require('@google-cloud/automl');
   const client = new automl.AutoMlClient();
 
   /**
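Note: importData (above) and deleteDataset (below) both follow the google-gax long-running-operation pattern: the client call resolves to an operation handle, operation.promise() resolves once the operation completes, and the third element of the resolved array is the final API response, which is why the samples check operationResponses[2].done. A minimal sketch of that flow, with variable names assumed from the surrounding sample:

    const [operation] = await client.importData({name: datasetFullId, inputConfig: inputConfig});
    const [result, metadata, finalApiResponse] = await operation.promise();
    if (finalApiResponse.done === true) console.log('Data imported.');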
@@ -213,85 +213,85 @@ async function deleteDataset(projectId, computeRegion, datasetId) {
   const [operations] = await client.deleteDataset({name: datasetFullId});
   const operationResponses = await operations.promise();
   // The final result of the operation.
-  if (operationResponses[2].done === true) console.log(`Dataset deleted.`);
+  if (operationResponses[2].done === true) console.log('Dataset deleted.');
 
   // [END automl_translation_delete_dataset]
 }
 
-require(`yargs`)
+require('yargs')
   .demand(1)
   .options({
     computeRegion: {
-      alias: `c`,
-      type: `string`,
+      alias: 'c',
+      type: 'string',
       default: 'us-central1',
       requiresArg: true,
-      description: `region name e.g. "us-central1"`,
+      description: 'region name e.g. "us-central1"',
     },
     datasetName: {
-      alias: `n`,
-      type: `string`,
-      default: `testDataSet`,
+      alias: 'n',
+      type: 'string',
+      default: 'testDataSet',
       requiresArg: true,
-      description: `Name of the Dataset`,
+      description: 'Name of the Dataset',
     },
     datasetId: {
-      alias: `i`,
-      type: `string`,
+      alias: 'i',
+      type: 'string',
       requiresArg: true,
-      description: `Id of the dataset`,
+      description: 'Id of the dataset',
     },
     filter: {
-      alias: `f`,
-      default: `translationDatasetMetadata:*`,
-      type: `string`,
+      alias: 'f',
+      default: 'translationDatasetMetadata:*',
+      type: 'string',
       requiresArg: true,
-      description: `Name of the Dataset to search for`,
+      description: 'Name of the Dataset to search for',
     },
     multilabel: {
-      alias: `m`,
-      type: `string`,
+      alias: 'm',
+      type: 'string',
       default: false,
       requiresArg: true,
       description:
-        `Type of the classification problem, ` +
-        `False - MULTICLASS, True - MULTILABEL.`,
+        'Type of the classification problem, ' +
+        'False - MULTICLASS, True - MULTILABEL.',
     },
     outputUri: {
-      alias: `o`,
-      type: `string`,
+      alias: 'o',
+      type: 'string',
       requiresArg: true,
-      description: `URI (or local path) to export dataset`,
+      description: 'URI (or local path) to export dataset',
     },
     path: {
-      alias: `p`,
-      type: `string`,
+      alias: 'p',
+      type: 'string',
       global: true,
-      default: `gs://nodejs-docs-samples-vcm/en-ja.csv`,
+      default: 'gs://nodejs-docs-samples-vcm/en-ja.csv',
       requiresArg: true,
-      description: `URI or local path to input .csv, or array of .csv paths`,
+      description: 'URI or local path to input .csv, or array of .csv paths',
     },
    projectId: {
-      alias: `z`,
-      type: `number`,
+      alias: 'z',
+      type: 'number',
       default: process.env.GCLOUD_PROJECT,
       requiresArg: true,
-      description: `The GCLOUD_PROJECT string, e.g. "my-gcloud-project"`,
+      description: 'The GCLOUD_PROJECT string, e.g. "my-gcloud-project"',
     },
     source: {
-      alias: `s`,
-      type: `string`,
+      alias: 's',
+      type: 'string',
       requiresArg: true,
-      description: `The source language to be translated from`,
+      description: 'The source language to be translated from',
     },
     target: {
-      alias: `t`,
-      type: `string`,
+      alias: 't',
+      type: 'string',
       requiresArg: true,
-      description: `The target language to be translated to`,
+      description: 'The target language to be translated to',
     },
   })
-  .command(`createDataset`, `creates a new Dataset`, {}, opts =>
+  .command('createDataset', 'creates a new Dataset', {}, opts =>
     createDataset(
       opts.projectId,
       opts.computeRegion,
@@ -300,24 +300,24 @@ require(`yargs`)
       opts.target
     )
   )
-  .command(`list-datasets`, `list all Datasets`, {}, opts =>
+  .command('list-datasets', 'list all Datasets', {}, opts =>
     listDatasets(opts.projectId, opts.computeRegion, opts.filter)
   )
-  .command(`get-dataset`, `Get a Dataset`, {}, opts =>
+  .command('get-dataset', 'Get a Dataset', {}, opts =>
     getDataset(opts.projectId, opts.computeRegion, opts.datasetId)
   )
-  .command(`delete-dataset`, `Delete a dataset`, {}, opts =>
+  .command('delete-dataset', 'Delete a dataset', {}, opts =>
     deleteDataset(opts.projectId, opts.computeRegion, opts.datasetId)
   )
-  .command(`import-data`, `Import labeled items into dataset`, {}, opts =>
+  .command('import-data', 'Import labeled items into dataset', {}, opts =>
     importData(opts.projectId, opts.computeRegion, opts.datasetId, opts.path)
   )
-  .example(`node $0 create-dataset -n "newDataSet" -s "en" -t "ja"`)
-  .example(`node $0 list-datasets -f "translationDatasetMetadata:*"`)
-  .example(`node $0 get-dataset -i "DATASETID"`)
-  .example(`node $0 delete-dataset -i "DATASETID"`)
+  .example('node $0 create-dataset -n "newDataSet" -s "en" -t "ja"')
+  .example('node $0 list-datasets -f "translationDatasetMetadata:*"')
+  .example('node $0 get-dataset -i "DATASETID"')
+  .example('node $0 delete-dataset -i "DATASETID"')
   .example(
-    `node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"`
+    'node $0 import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"'
   )
   .wrap(120)
   .recommendCommands()
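For reference, the commands registered above can be exercised as in the .example() strings, assuming the file is saved as automlTranslationDataset.js and GCLOUD_PROJECT is set:

    node automlTranslationDataset.js list-datasets -f "translationDatasetMetadata:*"
    node automlTranslationDataset.js get-dataset -i "DATASETID"
    node automlTranslationDataset.js import-data -i "dataSetId" -p "gs://myproject/mytraindata.csv"

One caution: the dataset-creation command is registered as "createDataset", while its example string shows "create-dataset"; yargs dispatches on the registered name, so the example as written would not reach createDataset.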