Commit 0678df5

Calibrator trainer needs to clear data so it can train another calibrator with new data (#1805)
1 parent 05284d6 commit 0678df5

21 files changed: +1326 -1 lines changed

src/Microsoft.ML.Data/Prediction/Calibrator.cs

Lines changed: 3 additions & 1 deletion
@@ -1112,7 +1112,9 @@ public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight)
         public ICalibrator FinishTraining(IChannel ch)
         {
             ch.Check(Data != null, "Calibrator trained on zero instances.");
-            return CreateCalibrator(ch);
+            var calibrator = CreateCalibrator(ch);
+            Data = null;
+            return calibrator;
         }
 
         public abstract ICalibrator CreateCalibrator(IChannel ch);
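The behavioral intent of the change is easiest to see with a small, self-contained sketch. The type and member names below are illustrative stand-ins, not the actual ML.NET signatures: a calibrator trainer accumulates examples, builds a calibrator in FinishTraining, and, with this fix, clears its accumulated data so the same trainer instance can be fed fresh data for the next calibrator.

using System;
using System.Collections.Generic;

// Simplified stand-in for the calibrator trainer touched by this commit.
// Names and signatures are illustrative only and do not mirror ML.NET.
class ToyCalibratorTrainer
{
    // Accumulated training examples; plays the role of the Data field that
    // FinishTraining checks and (after this change) clears.
    private List<(float Score, bool Label)> _data;

    public void ProcessTrainingExample(float score, bool labelIs1)
    {
        if (_data == null)
            _data = new List<(float Score, bool Label)>();
        _data.Add((score, labelIs1));
    }

    // Stands in for CreateCalibrator(ch): here it just reports how many
    // examples the calibrator saw, then clears the buffer so the trainer
    // can be reused for another calibrator.
    public int FinishTraining()
    {
        if (_data == null)
            throw new InvalidOperationException("Calibrator trained on zero instances.");
        int trainedOn = _data.Count;
        _data = null; // the fix: drop the old data before the next training run
        return trainedOn;
    }
}

static class Demo
{
    static void Main()
    {
        var trainer = new ToyCalibratorTrainer();

        trainer.ProcessTrainingExample(0.9f, labelIs1: true);
        trainer.ProcessTrainingExample(0.1f, labelIs1: false);
        Console.WriteLine(trainer.FinishTraining()); // 2

        // Reuse the same trainer instance for a second calibrator.
        trainer.ProcessTrainingExample(0.4f, labelIs1: false);
        Console.WriteLine(trainer.FinishTraining()); // 1 with the clear; 3 without it
    }
}

Without the Data = null step, a reused trainer folds the previous calibrator's examples into the next one; the new OVA baselines below appear to exercise exactly this pattern by training one calibrator per learner and per fold.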
Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
+maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1
+Not adding a normalizer.
+Training learner 0
+Training calibrator.
+Training learner 1
+Training calibrator.
+Training learner 2
+Training calibrator.
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Training learner 0
+Training calibrator.
+Training learner 1
+Training calibrator.
+Training learner 2
+Training calibrator.
+Not training a calibrator because it is not needed.
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    21 |     0 |     0 | 1.0000
+        1 ||     0 |    22 |     8 | 0.7333
+        2 ||     0 |     0 |    28 | 1.0000
+          ||========================
+Precision ||1.0000 |1.0000 |0.7778 |
+Accuracy(micro-avg): 0.898734
+Accuracy(macro-avg): 0.911111
+Log-loss: 0.372620
+Log-loss reduction: 65.736556
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    29 |     0 |     0 | 1.0000
+        1 ||     0 |    18 |     2 | 0.9000
+        2 ||     0 |     0 |    22 | 1.0000
+          ||========================
+Precision ||1.0000 |1.0000 |0.9167 |
+Accuracy(micro-avg): 0.971831
+Accuracy(macro-avg): 0.966667
+Log-loss: 0.357704
+Log-loss reduction: 67.051654
+
+OVERALL RESULTS
+---------------------------------------
+Accuracy(micro-avg): 0.935283 (0.0365)
+Accuracy(macro-avg): 0.938889 (0.0278)
+Log-loss: 0.365162 (0.0075)
+Log-loss reduction: 66.394105 (0.6575)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
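For readers scanning this baseline, the reported accuracies follow directly from the confusion tables; for the first fold, for example:

    Accuracy(micro-avg) = (21 + 22 + 28) / 79 = 71 / 79 ≈ 0.898734
    Accuracy(macro-avg) = (1.0000 + 0.7333 + 1.0000) / 3 ≈ 0.911111

and the overall micro-averaged accuracy is the mean of the two folds, (0.898734 + 0.971831) / 2 ≈ 0.935283, with the parenthesized 0.0365 matching half the spread between the folds.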
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+OVA
+Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.935283 0.938889 0.365162 66.3941 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8}
+
Lines changed: 151 additions & 0 deletions
@@ -0,0 +1,151 @@
+Instance	Label	Assigned	Log-loss	#1 Score	#2 Score	#3 Score	#1 Class	#2 Class	#3 Class
+5 0 0 0.043587643807703136 0.957348645 0.04264102 1.03425764E-05 0 1 2
+6 0 0 0.20844569128859777 0.8118451 0.188126311 2.8563165E-05 0 1 2
+8 0 0 0.44491771498326443 0.640877 0.359043151 7.987263E-05 0 1 2
+9 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2
+10 0 0 0.064111239185181926 0.937900662 0.0620922744 7.04143076E-06 0 1 2
+11 0 0 0.19511668953898065 0.822738647 0.17722775 3.361244E-05 0 1 2
+18 0 0 0.040957067767296483 0.959870338 0.0401218459 7.82396E-06 0 1 2
+20 0 0 0.12310363986545093 0.884172 0.115805365 2.26346256E-05 0 1 2
+21 0 0 0.080695089616231355 0.9224749 0.07751174 1.33279436E-05 0 1 2
+25 0 0 0.30682306393325992 0.7357808 0.2641595 5.97413928E-05 0 1 2
+28 0 0 0.13141817305409223 0.876851 0.12313617 1.279574E-05 0 1 2
+31 0 0 0.10895984751128654 0.8967664 0.103215657 1.78892569E-05 0 1 2
+32 0 0 0.035477802883361699 0.965144157 0.0348526426 3.21791072E-06 0 1 2
+35 0 0 0.20274726386806977 0.8164846 0.183501333 1.40994789E-05 0 1 2
+37 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2
+40 0 0 0.12298365201239185 0.8842781 0.115710318 1.158336E-05 0 1 2
+41 0 0 0.63401679194266458 0.5304568 0.4693291 0.000214140542 0 1 2
+44 0 0 0.077454598775344219 0.925469041 0.07449977 3.11931653E-05 0 1 2
+45 0 0 0.3215830979624606 0.7250004 0.274949133 5.0463128E-05 0 1 2
+46 0 0 0.072538640149662562 0.9300298 0.06995974 1.0462416E-05 0 1 2
+48 0 0 0.070505947028000213 0.9319222 0.0680698752 7.905172E-06 0 1 2
+50 1 1 0.97585559443809855 0.376869768 0.358608037 0.264522225 1 2 0
+51 1 1 0.820648723050456 0.440146029 0.3583106 0.201543346 1 2 0
+52 1 2 1.0835275133336952 0.5653485 0.3383997 0.0962518156 2 1 0
+54 1 2 0.75898112148691677 0.472428739 0.468143165 0.0594281144 2 1 0
+56 1 2 1.0174162545586878 0.5111817 0.36152783 0.127290443 2 1 0
+60 1 1 0.30253484094402477 0.738942742 0.209481522 0.051575724 1 2 0
+63 1 2 0.77949402405350943 0.499299049 0.458638 0.04206293 2 1 0
+64 1 1 0.45505022537231249 0.6344161 0.288069278 0.0775146 1 0 2
+66 1 1 0.7154835565078782 0.488955617 0.46298942 0.04805498 1 2 0
+68 1 1 0.68322766519277334 0.504984438 0.482306838 0.0127087329 1 2 0
+69 1 1 0.31084089328775633 0.732830465 0.183353335 0.08381622 1 2 0
+70 1 2 1.1682944017762613 0.6530067 0.310896754 0.0360965841 2 1 0
+71 1 1 0.43377030209255479 0.6480611 0.185485169 0.166453749 1 0 2
+72 1 2 0.88766165771254424 0.578003168 0.41161713 0.0103796953 2 1 0
+73 1 1 0.66368933400718488 0.514948 0.4451455 0.03990646 1 2 0
+74 1 1 0.54404239638263385 0.5803973 0.245238408 0.174364269 1 2 0
+76 1 2 0.84677727980192752 0.5165659 0.4287946 0.0546395145 2 1 0
+77 1 2 1.2789386167391619 0.688494444 0.278332561 0.0331729874 2 1 0
+79 1 1 0.34033011681215469 0.7115354 0.2354252 0.0530394167 1 0 2
+82 1 1 0.35118632841443026 0.7038526 0.15209128 0.144056112 1 2 0
+88 1 1 0.4571578145475656 0.6330804 0.217808262 0.149111286 1 2 0
+90 1 1 0.54303381152243435 0.580983 0.390706122 0.0283109024 1 2 0
+91 1 1 0.7255881783753686 0.484039783 0.444033325 0.0719268844 1 2 0
+92 1 1 0.35286862388238727 0.7026695 0.20336625 0.09396427 1 2 0
+93 1 1 0.24150358472221847 0.785446 0.122569308 0.0919846743 1 0 2
+95 1 1 0.45747345580807686 0.6328806 0.223053813 0.144065529 1 2 0
+96 1 1 0.46692162584127184 0.6269292 0.2688824 0.1041884 1 2 0
+97 1 1 0.52181706235134551 0.593441248 0.265519917 0.14103885 1 2 0
+98 1 1 0.33964763199167614 0.7120212 0.255649149 0.03232969 1 0 2
+99 1 1 0.42298578084071409 0.655087948 0.2430654 0.10184665 1 2 0
+100 2 2 0.13591733259440952 0.8729148 0.125132382 0.00195282849 2 1 0
+102 2 2 0.13809510857610402 0.871015847 0.125785753 0.00319840666 2 1 0
+104 2 2 0.19932133588014422 0.8192866 0.178226635 0.00248679356 2 1 0
+105 2 2 0.09978434131070596 0.9050326 0.09390649 0.00106095837 2 1 0
+106 2 2 0.65516062299283195 0.519358635 0.4732639 0.00737748668 2 1 0
+108 2 2 0.36038464423836569 0.697408 0.300992548 0.00159944966 2 1 0
+109 2 2 0.042800052177573163 0.958102942 0.03757144 0.00432561943 2 1 0
+111 2 2 0.33893424257144178 0.7125293 0.282670647 0.004800048 2 1 0
+112 2 2 0.17819193567707683 0.8367818 0.156975582 0.006242614 2 1 0
+113 2 2 0.49781014911918742 0.6078603 0.388630718 0.00350892986 2 1 0
+115 2 2 0.1683952699484349 0.845019758 0.146008313 0.008971939 2 1 0
+117 2 2 0.023365514010699712 0.976905346 0.02015102 0.00294363522 2 1 0
+120 2 2 0.11133724227002473 0.894637 0.100207157 0.005155826 2 1 0
+121 2 2 0.43666882240878063 0.6461854 0.346793234 0.007021348 2 1 0
+122 2 2 0.13629671282280101 0.8725837 0.126684025 0.000732263 2 1 0
+123 2 2 0.4310483662194341 0.6498275 0.338038325 0.0121342046 2 1 0
+125 2 2 0.11330052370098145 0.8928823 0.101871319 0.0052463985 2 1 0
+128 2 2 0.27949760674881013 0.756163538 0.2411889 0.00264759478 2 1 0
+129 2 2 0.17530740569786113 0.839199 0.153847516 0.006953467 2 1 0
+131 2 2 0.031839393778411017 0.968662143 0.0223613773 0.008976495 2 1 0
+132 2 2 0.27137481365798816 0.7623307 0.235219285 0.00245000049 2 1 0
+133 2 2 0.43700297433440277 0.6459695 0.341389537 0.01264096 2 1 0
+137 2 2 0.23063259534491895 0.794031143 0.198468238 0.00750062242 2 1 0
+138 2 2 0.43845130281190237 0.6450346 0.3309319 0.0240335166 2 1 0
+141 2 2 0.17626166414917829 0.8383986 0.142890155 0.018711295 2 1 0
+144 2 2 0.099717233123952864 0.9050933 0.09041383 0.00449282629 2 1 0
+145 2 2 0.18787613378173548 0.828717351 0.161682889 0.009599784 2 1 0
+147 2 2 0.24798062433245444 0.780375063 0.20853655 0.01108838 2 1 0
+0 0 0 0.34881132522048625 0.705526233 0.294473559 1.92144441E-07 0 1 2
+1 0 0 0.36141580969752651 0.696689248 0.303309947 7.8389877E-07 0 1 2
+2 0 0 0.35568660847624228 0.7006922 0.299307227 5.929496E-07 0 1 2
+3 0 0 0.36470718348091719 0.694399953 0.30559817 1.84990029E-06 0 1 2
+4 0 0 0.34775770739677259 0.70627 0.293729782 2.00147142E-07 0 1 2
+7 0 0 0.35382023048081196 0.702001154 0.297998428 4.17606344E-07 0 1 2
+12 0 0 0.36098727532383801 0.696987867 0.303011417 7.32556146E-07 0 1 2
+13 0 0 0.35788558263546733 0.699153066 0.300846159 7.88259E-07 0 1 2
+14 0 0 0.33437356737542145 0.715786338 0.284213632 7.41558059E-09 0 1 2
+15 0 0 0.33259729807630167 0.7170589 0.2829411 2.302074E-08 0 1 2
+16 0 0 0.33963038748907248 0.712033451 0.2879665 5.7538923E-08 0 1 2
+17 0 0 0.34952968472792562 0.7050196 0.294980139 2.583559E-07 0 1 2
+19 0 0 0.34579385759256209 0.70765835 0.292341441 2.096021E-07 0 1 2
+22 0 0 0.34605819997965914 0.7074713 0.29252857 1.20912986E-07 0 1 2
+23 0 0 0.36811690986051288 0.6920363 0.307961673 2.02298725E-06 0 1 2
+24 0 0 0.37119922981165249 0.6899065 0.310090721 2.80658514E-06 0 1 2
+26 0 0 0.35941763729273518 0.698082745 0.301916152 1.08453048E-06 0 1 2
+27 0 0 0.35009366263991337 0.7046221 0.295377672 2.18394433E-07 0 1 2
+29 0 0 0.36473963008629695 0.6943774 0.305620819 1.74348168E-06 0 1 2
+30 0 0 0.36694890288891646 0.692845047 0.307153255 1.67791779E-06 0 1 2
+33 0 0 0.33532989874849606 0.715102136 0.284897834 2.06016182E-08 0 1 2
+34 0 0 0.36074853902438908 0.6971543 0.302845 6.899852E-07 0 1 2
+36 0 0 0.3442039710581144 0.708784342 0.2912156 5.37456621E-08 0 1 2
+38 0 0 0.36249420192434484 0.695938349 0.304059923 1.72375007E-06 0 1 2
+39 0 0 0.35302378267720547 0.7025605 0.2974392 3.31980715E-07 0 1 2
+42 0 0 0.35802404250832931 0.699056268 0.30094254 1.18013247E-06 0 1 2
+43 0 0 0.35964093968844252 0.6979269 0.3020715 1.62606943E-06 0 1 2
+47 0 0 0.35894549345005311 0.6984124 0.301586539 1.06725963E-06 0 1 2
+49 0 0 0.35354270878931848 0.702196 0.2978036 3.52685419E-07 0 1 2
+53 1 1 0.43814530313713385 0.645232 0.30703035 0.0477376431 1 2 0
+55 1 1 0.52991693789558192 0.588653862 0.3812489 0.0300972275 1 2 0
+57 1 1 0.3144098829942828 0.730219662 0.222309768 0.047470592 1 0 2
+58 1 1 0.16338480577647163 0.8492643 0.10643021 0.0443054661 1 2 0
+59 1 1 0.43732800775193598 0.6457596 0.2802832 0.0739572 1 2 0
+61 1 1 0.26826825521335035 0.7647026 0.165891945 0.06940546 1 2 0
+62 1 1 0.16519114388964468 0.84773165 0.07620503 0.0760633051 1 2 0
+65 1 1 0.12274628026245138 0.884488046 0.0737581253 0.0417538173 1 0 2
+67 1 1 0.18131529311314101 0.8341723 0.09860581 0.0672218949 1 0 2
+75 1 1 0.13597202822637602 0.872867048 0.06550142 0.0616315342 1 0 2
+78 1 1 0.45281685214847861 0.6358346 0.332585216 0.0315802023 1 2 0
+80 1 1 0.23111914057709354 0.7936449 0.105801627 0.10055349 1 0 2
+81 1 1 0.21336965456040224 0.807857454 0.137252137 0.0548904277 1 0 2
+83 1 2 1.1418187241491273 0.6765539 0.3192379 0.00420819456 2 1 0
+84 1 2 0.79904921193122347 0.5272309 0.449756384 0.0230127368 2 1 0
+85 1 1 0.32376577391953759 0.723419666 0.223107859 0.05347247 1 2 0
+86 1 1 0.19858499419365563 0.8198901 0.142534047 0.037575867 1 2 0
+87 1 1 0.32077518585346926 0.725586355 0.24517718 0.0292364415 1 2 0
+89 1 1 0.36379979176298916 0.695030332 0.242874637 0.06209502 1 2 0
+94 1 1 0.37955450575815275 0.684166133 0.262924 0.0529098734 1 2 0
+101 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0
+103 2 2 0.33454245635946434 0.71566546 0.282688916 0.00164566189 2 1 0
+107 2 2 0.32176538137222749 0.724868238 0.27461502 0.000516714761 2 1 0
+110 2 2 0.4735643051120339 0.622778535 0.371121377 0.006100062 2 1 0
+114 2 2 0.3247388954422577 0.722716033 0.2752217 0.00206224318 2 1 0
+116 2 2 0.36482478597312679 0.6943183 0.3033064 0.00237530912 2 1 0
+118 2 2 0.30234231913723036 0.739085 0.260826528 8.847632E-05 2 1 0
+119 2 2 0.37792268388420569 0.6852835 0.3114479 0.0032686044 2 1 0
+124 2 2 0.33777190179266953 0.713358 0.284956157 0.00168584171 2 1 0
+126 2 2 0.51721400727433164 0.5961792 0.395325035 0.008495758 2 1 0
+127 2 2 0.48223827874761288 0.617399931 0.374503255 0.008096788 2 1 0
+130 2 2 0.33112825051245398 0.718113065 0.281247973 0.0006389589 2 1 0
+134 2 2 0.34240991810493487 0.7100571 0.288253874 0.00168902089 2 1 0
+135 2 2 0.3238549270121831 0.7233552 0.2760729 0.0005719304 2 1 0
+136 2 2 0.31869296062169472 0.727098763 0.271312952 0.00158829393 2 1 0
+139 2 2 0.39792518591800413 0.6717123 0.325529337 0.002758371 2 1 0
+140 2 2 0.32133155702629967 0.7251828 0.273566216 0.0012509838 2 1 0
+142 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0
+143 2 2 0.31559105025409723 0.72935766 0.269767135 0.0008751799 2 1 0
+146 2 2 0.3760214987664387 0.6865876 0.310142934 0.00326948427 2 1 0
+148 2 2 0.3305544580259554 0.718525231 0.2789816 0.00249314215 2 1 0
+149 2 2 0.37408822283240173 0.6879162 0.307514042 0.00456974143 2 1 0
Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
+maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1
+Not adding a normalizer.
+Training learner 0
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 71 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 1
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 71 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 2
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 71 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Training learner 0
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 1
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 2
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Not training a calibrator because it is not needed.
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    21 |     0 |     0 | 1.0000
+        1 ||     0 |    25 |     5 | 0.8333
+        2 ||     0 |     1 |    27 | 0.9643
+          ||========================
+Precision ||1.0000 |0.9615 |0.8438 |
+Accuracy(micro-avg): 0.924051
+Accuracy(macro-avg): 0.932540
+Log-loss: 0.197783
+Log-loss reduction: 81.813342
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    29 |     0 |     0 | 1.0000
+        1 ||     0 |    19 |     1 | 0.9500
+        2 ||     0 |     2 |    20 | 0.9091
+          ||========================
+Precision ||1.0000 |0.9048 |0.9524 |
+Accuracy(micro-avg): 0.957746
+Accuracy(macro-avg): 0.953030
+Log-loss: 0.103360
+Log-loss reduction: 90.479422
+
+OVERALL RESULTS
+---------------------------------------
+Accuracy(micro-avg): 0.940899 (0.0168)
+Accuracy(macro-avg): 0.942785 (0.0102)
+Log-loss: 0.150571 (0.0472)
+Log-loss reduction: 86.146382 (4.3330)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+OVA
+Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.940899 0.942785 0.150571 86.14639 FastForest{} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:FastForest{}
+
