15 files changed (+144, -81 lines), all under algorithmic_efficiency/workloads.

algorithmic_efficiency/workloads/…

@@ -173,7 +173,7 @@ def use_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.123744
+    return 0.123757

   @property
   def test_target_value(self) -> float:

@@ -191,23 +191,23 @@ def use_resnet(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.124027
+    return 0.12415

   @property
   def test_target_value(self) -> float:
-    return 0.126468
+    return 0.12648


 class Criteo1TbDlrmSmallEmbedInitWorkload(Criteo1TbDlrmSmallWorkload):

   @property
   def validation_target_value(self) -> float:
-    return 0.124286
+    return 0.129657

   @property
   def test_target_value(self) -> float:
     # Todo
-    return 0.126725
+    return 0.131967

   @property
   def embedding_init_multiplier(self) -> float:
algorithmic_efficiency/workloads/…

@@ -254,7 +254,7 @@ def use_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.123744
+    return 0.123757

   @property
   def test_target_value(self) -> float:

@@ -272,23 +272,23 @@ def use_resnet(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.124027
+    return 0.12415

   @property
   def test_target_value(self) -> float:
-    return 0.126468
+    return 0.12648


 class Criteo1TbDlrmSmallEmbedInitWorkload(Criteo1TbDlrmSmallWorkload):

   @property
   def validation_target_value(self) -> float:
-    return 0.124286
+    return 0.129657

   @property
   def test_target_value(self) -> float:
     # Todo
-    return 0.126725
+    return 0.131967

   @property
   def embedding_init_multiplier(self) -> float:
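Both Criteo hunks above (presumably the two framework copies of the same workload file) touch one pattern: variant classes that subclass the base DLRM workload and override only a behavior toggle plus the two target properties. Below is a minimal sketch of that pattern reconstructed from the hunks; the base class and any members not shown in the diff are assumed, and the numeric values are simply the new ones introduced by this change.

# Sketch only: reconstructed from the hunks above, not a complete file.
class Criteo1TbDlrmSmallEmbedInitWorkload(Criteo1TbDlrmSmallWorkload):

  @property
  def validation_target_value(self) -> float:
    # New validation target introduced by this change.
    return 0.129657

  @property
  def test_target_value(self) -> float:
    # Todo  (comment carried over from the original file)
    return 0.131967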
algorithmic_efficiency/workloads/…

@@ -272,11 +272,11 @@ def use_silu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22009
+    return 0.75445

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3426
+    return 0.6323


 class ImagenetResNetGELUWorkload(ImagenetResNetWorkload):

@@ -287,11 +287,11 @@ def use_gelu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22077
+    return 0.76765

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3402
+    return 0.6519


 class ImagenetResNetLargeBNScaleWorkload(ImagenetResNetWorkload):

@@ -302,8 +302,8 @@ def bn_init_scale(self) -> float:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.23474
+    return 0.76526

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3577
+    return 0.6423
algorithmic_efficiency/workloads/…

@@ -326,11 +326,11 @@ def use_silu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22009
+    return 0.75445

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.342
+    return 0.6323


 class ImagenetResNetGELUWorkload(ImagenetResNetWorkload):

@@ -341,11 +341,11 @@ def use_gelu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22077
+    return 0.76765

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3402
+    return 0.6519


 class ImagenetResNetLargeBNScaleWorkload(ImagenetResNetWorkload):

@@ -356,8 +356,8 @@ def bn_init_scale(self) -> float:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.23474
+    return 0.76526

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3577
+    return 0.6423
algorithmic_efficiency/workloads/…

@@ -99,11 +99,11 @@ def use_glu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.2233
+    return 0.75738

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3455
+    return 0.6359


 class ImagenetVitPostLNWorkload(ImagenetVitWorkload):

@@ -114,11 +114,11 @@ def use_post_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.24688
+    return 0.75312

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3714
+    return 0.6286


 class ImagenetVitMapWorkload(ImagenetVitWorkload):

@@ -129,8 +129,8 @@ def use_map(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22886
+    return 0.77113

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3477
+    return 0.6523
algorithmic_efficiency/workloads/…

@@ -90,11 +90,11 @@ def use_glu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.2233
+    return 0.75738

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3455
+    return 0.6359


 class ImagenetVitPostLNWorkload(ImagenetVitWorkload):

@@ -105,11 +105,11 @@ def use_post_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.24688
+    return 0.75312

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3714
+    return 0.6286


 class ImagenetVitMapWorkload(ImagenetVitWorkload):

@@ -120,8 +120,8 @@ def use_map(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 1 - 0.22886
+    return 0.77113

   @property
   def test_target_value(self) -> float:
-    return 1 - 0.3477
+    return 0.6523
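A note on the ImageNet ResNet and ViT hunks above: the targets move from "1 - error_rate" expressions to literal accuracy values, and the numbers themselves change as well, so this is a retuning of the targets rather than a purely notational rewrite. A quick arithmetic check on the SiLU ResNet validation target (variable names are illustrative only):

# Quick check that the rewrite is not purely notational.
old_target = 1 - 0.22009   # old SiLU ResNet validation target = 0.77991
new_target = 0.75445       # new SiLU ResNet validation target
print(round(old_target - new_target, 5))  # 0.02546 -> the new target is lower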
algorithmic_efficiency/workloads/…

@@ -388,11 +388,11 @@ def attention_temperature(self) -> float:

   @property
   def validation_target_value(self) -> float:
-    return 0.082665
+    return 0.109977

   @property
   def test_target_value(self) -> float:
-    return 0.50168
+    return 0.068065


 class LibriSpeechConformerLayerNormWorkload(LibriSpeechConformerWorkload):

@@ -403,11 +403,11 @@ def use_post_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.085371
+    return 0.09731

   @property
   def test_target_value(self) -> float:
-    return 0.053096
+    return 0.05996


 class LibriSpeechConformerGeluWorkload(LibriSpeechConformerWorkload):

@@ -418,8 +418,8 @@ def use_gelu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.077958
+    return 0.094114

   @property
   def test_target_value(self) -> float:
-    return 0.047643
+    return 0.056629
algorithmic_efficiency/workloads/…

@@ -354,11 +354,11 @@ def attention_temperature(self) -> float:

   @property
   def validation_target_value(self) -> float:
-    return 0.082665
+    return 0.109977

   @property
   def test_target_value(self) -> float:
-    return 0.050168
+    return 0.068065


 class LibriSpeechConformerLayerNormWorkload(LibriSpeechConformerWorkload):

@@ -369,11 +369,11 @@ def use_post_layer_norm(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.085371
+    return 0.09731

   @property
   def test_target_value(self) -> float:
-    return 0.053096
+    return 0.05996


 class LibriSpeechConformerGeluWorkload(LibriSpeechConformerWorkload):

@@ -384,8 +384,8 @@ def use_gelu(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.077958
+    return 0.094114

   @property
   def test_target_value(self) -> float:
-    return 0.047643
+    return 0.056629
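Two details in the Conformer hunks above are worth noting. First, the two copies of the workload file disagree on the old attention-temperature test target (0.50168 in one, 0.050168 in the other, most likely a stray typo in one implementation); both now converge on the same new value, 0.068065. Second, these LibriSpeech targets are small fractions because they appear to be word-error-rate style metrics, where lower is better, in contrast to the ImageNet accuracy targets above.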
algorithmic_efficiency/workloads/…

@@ -109,11 +109,11 @@ def use_tanh(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.133449
+    return 0.150883

   @property
   def test_target_value(self) -> float:
-    return 0.079810
+    return 0.098613


 class LibriSpeechDeepSpeechNoResNetWorkload(LibriSpeechDeepSpeechWorkload):

@@ -124,11 +124,11 @@ def enable_residual_connections(self) -> bool:

   @property
   def validation_target_value(self) -> float:
-    return 0.105042
+    return 0.131564

   @property
   def test_target_value(self) -> float:
-    return 0.060388
+    return 0.079297


 class LibriSpeechDeepSpeechNormAndSpecAugWorkload(LibriSpeechDeepSpeechWorkload

@@ -156,8 +156,8 @@ def time_mask_count(self) -> int:

   @property
   def validation_target_value(self) -> float:
-    return 0.131553
+    return 0.14342

   @property
   def test_target_value(self) -> float:
-    return 0.082442
+    return 0.090976
algorithmic_efficiency/workloads/…

@@ -114,13 +114,29 @@ class LibriSpeechDeepSpeechTanhWorkload(LibriSpeechDeepSpeechWorkload):
   def use_tanh(self) -> bool:
     return True

+  @property
+  def validation_target_value(self) -> float:
+    return 0.150883
+
+  @property
+  def test_target_value(self) -> float:
+    return 0.098613
+


 class LibriSpeechDeepSpeechNoResNetWorkload(LibriSpeechDeepSpeechWorkload):

   @property
   def enable_residual_connections(self) -> bool:
     return False

+  @property
+  def validation_target_value(self) -> float:
+    return 0.131564
+
+  @property
+  def test_target_value(self) -> float:
+    return 0.079297
+

 class LibriSpeechDeepSpeechNormAndSpecAugWorkload(LibriSpeechDeepSpeechWorkload
 ):

@@ -144,3 +160,11 @@ def freq_mask_count(self) -> int:
   @property
   def time_mask_count(self) -> int:
     return 15
+
+  @property
+  def validation_target_value(self) -> float:
+    return 0.14342
+
+  @property
+  def test_target_value(self) -> float:
+    return 0.090976
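For readers unfamiliar with the codebase: the benchmark harness compares a run's evaluation metrics against these target properties to decide whether a workload's goal has been reached. The exact hook is not part of this diff, so the following is only a rough sketch under that assumption; the method name, metric key, and comparison direction are illustrative, not taken from this change.

# Hypothetical sketch of how a target property could be consumed; the real
# base-class hook in the repository may be named and structured differently.
class TargetCheckSketch:

  @property
  def validation_target_value(self) -> float:
    return 0.131564  # e.g. the new DeepSpeech no-residual validation target

  def has_reached_validation_target(self, eval_result: dict) -> bool:
    # LibriSpeech targets are WER-style, so lower is better.
    return eval_result['validation/wer'] <= self.validation_target_value

# Example: TargetCheckSketch().has_reached_validation_target({'validation/wer': 0.12}) -> True

For accuracy-style workloads such as ImageNet, the comparison direction would flip to greater-than-or-equal.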