'use strict';

const {assert} = require('chai');
-const {describe, it, before, after} = require('mocha');
+const {describe, it, before} = require('mocha');
const {AutoMlClient} = require('@google-cloud/automl').v1;

const cp = require('child_process');
@@ -25,12 +25,66 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const IMPORT_DATASET_REGION_TAG = 'import_dataset';
const LOCATION = 'us-central1';
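+// Test datasets older than this age (in seconds) are treated as stale
+// and removed by cleanupOldDatasets() below.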
+const TWENTY_MINUTES_IN_SECONDS = 60 * 20;
+
+// If two suites of tests are running in parallel, importing and creating
+// datasets can fail with:
+// No other operations should be working on projects/1046198160504/*.
+const delay = async test => {
+  const retries = test.currentRetry();
+  if (retries === 0) return; // no retry on the first failure.
+  // see: https://cloud.google.com/storage/docs/exponential-backoff:
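+  // Double the wait on each retry, plus up to 2s of random jitter.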
+  const ms = Math.pow(2, retries) * 1000 + Math.random() * 2000;
+  return new Promise(done => {
+    console.info(`retrying "${test.title}" in ${ms}ms`);
+    setTimeout(done, ms);
+  });
+};

describe('Automl Import Dataset Test', () => {
  const client = new AutoMlClient();
  let datasetId;

-  before('should create a dataset', async () => {
+  before(async () => {
+    await cleanupOldDatasets();
+  });
+
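+  // Deletes leftover test_<hex> translation datasets from earlier or
+  // parallel runs once they are more than twenty minutes old.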
+  async function cleanupOldDatasets() {
+    const projectId = await client.getProjectId();
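+    // List only translation datasets, the kind these tests create.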
+    let request = {
+      parent: client.locationPath(projectId, LOCATION),
+      filter: 'translation_dataset_metadata:*',
+    };
+    const [response] = await client.listDatasets(request);
+    for (const dataset of response) {
+      try {
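+        // The dataset ID is the last segment of the resource name.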
+        const id = dataset.name
+          .split('/')
+          [dataset.name.split('/').length - 1].split('\n')[0];
+        console.info(`checking dataset ${id}`);
+        if (id.match(/test_[0-9a-f]{8}/)) {
+          console.info(`deleting dataset ${id}`);
+          if (
+            Date.now() / 1000 - dataset.createTime.seconds >
+            TWENTY_MINUTES_IN_SECONDS
+          ) {
+            console.info(`dataset ${id} is greater than 20 minutes old`);
+            request = {
+              name: client.datasetPath(projectId, LOCATION, id),
+            };
+            const [operation] = await client.deleteDataset(request);
+            await operation.promise();
+          }
+        }
+      } catch (err) {
+        console.warn(err);
+      }
+    }
+  }
+
+  it('should create a dataset', async function () {
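+    // Allow up to 5 retries, with delay() backing off between attempts.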
+    this.retries(5);
+    await delay(this.test);
    const projectId = await client.getProjectId();
    const displayName = `test_${uuid
      .v4()
@@ -53,7 +107,9 @@ describe('Automl Import Dataset Test', () => {
      [response.name.split('/').length - 1].split('\n')[0];
  });

-  it('should create, import, and delete a dataset', async () => {
+  it('should import dataset', async function () {
+    this.retries(5);
+    await delay(this.test);
    const projectId = await client.getProjectId();
    const data = `gs://${projectId}-automl-translate/en-ja-short.csv`;
    const import_output = execSync(
@@ -62,7 +118,9 @@ describe('Automl Import Dataset Test', () => {
    assert.match(import_output, /Dataset imported/);
  });

-  after('delete created dataset', async () => {
+  it('should delete created dataset', async function () {
+    this.retries(5);
+    await delay(this.test);
    const projectId = await client.getProjectId();
    const request = {
      name: client.datasetPath(projectId, LOCATION, datasetId),