Commit 656a42f

Updates ml.net reference of LightGBM to version 2.2 (#2448)

- Updates the ml.net reference of LightGBM to version 2.2.3 (fixes #2446).
- Updated the LightGBM model-parsing code to handle inf and -inf (now checks Contains rather than Equals).
- Additional updates for handling NaN.
- Moved all LightGBM baseline tests from SingleDebug/SingleRelease to Common.
- Added a Seed parameter to the LightGBM arguments to support setting LightGBM's random seed.

1 parent 174de1a · commit 656a42f

File tree

104 files changed

+716
-8550
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

104 files changed

+716
-8550
lines changed

build/Dependencies.props (+1 −1)

@@ -14,7 +14,7 @@
   <!-- Other/Non-Core Product Dependencies -->
   <PropertyGroup>
     <GoogleProtobufPackageVersion>3.5.1</GoogleProtobufPackageVersion>
-    <LightGBMPackageVersion>2.2.1.1</LightGBMPackageVersion>
+    <LightGBMPackageVersion>2.2.3</LightGBMPackageVersion>
     <MicrosoftMLOnnxRuntimePackageVersion>0.2.1</MicrosoftMLOnnxRuntimePackageVersion>
     <MlNetMklDepsPackageVersion>0.0.0.7</MlNetMklDepsPackageVersion>
     <ParquetDotNetPackageVersion>2.1.3</ParquetDotNetPackageVersion>
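
The bumped MSBuild property above is the single place where the native LightGBM NuGet version is pinned for the whole repo. As a minimal sketch of how such a property is typically consumed from a project file (the package `Include` name here is illustrative, not taken from this commit):

```xml
<!-- Hypothetical consumer .csproj: the version flows in from Dependencies.props -->
<ItemGroup>
  <PackageReference Include="LightGBM" Version="$(LightGBMPackageVersion)" />
</ItemGroup>
```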

src/Microsoft.ML.LightGBM/LightGbmArguments.cs (+4 −1)

@@ -371,6 +371,9 @@ public enum EvalMetricType
         [TlcModule.SweepableDiscreteParam("CatL2", new object[] { 0.1, 0.5, 1, 5, 10 })]
         public double CatL2 = 10;

+        [Argument(ArgumentType.AtMostOnce, HelpText = "Sets the random seed for LightGBM to use.")]
+        public int? Seed;
+
         [Argument(ArgumentType.Multiple, HelpText = "Parallel LightGBM Learning Algorithm", ShortName = "parag")]
         public ISupportParallel ParallelTrainer = new SingleTrainerFactory();

@@ -390,7 +393,7 @@ internal Dictionary<string, object> ToDictionary(IHost host)
             if (NThread.HasValue)
                 res["nthread"] = NThread.Value;

-            res["seed"] = host.Rand.Next();
+            res["seed"] = (Seed.HasValue) ? Seed : host.Rand.Next();

             string metric = null;
             switch (EvalMetric)
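
The new `Seed` argument makes LightGBM runs reproducible on demand: when set, it is forwarded to LightGBM's `seed` parameter; otherwise the host's random source is used, as before. A minimal standalone sketch of that resolution logic (names simplified; `System.Random` stands in for `host.Rand`):

```csharp
using System;
using System.Collections.Generic;

class SeedSketch
{
    static void Main()
    {
        int? seed = 42;          // the new optional Seed argument; null means "not set"
        var rand = new Random(); // stands in for host.Rand

        var res = new Dictionary<string, object>
        {
            // Fixed seed when provided, random seed otherwise -- mirrors the diff above.
            ["seed"] = seed ?? rand.Next()
        };

        Console.WriteLine(res["seed"]); // prints 42
    }
}
```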

src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs (+1 −1)

@@ -35,7 +35,7 @@ public sealed class LightGbmMulticlassTrainer : LightGbmTrainerBase<VBuffer<floa
         public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification;

         internal LightGbmMulticlassTrainer(IHostEnvironment env, Options options)
-            : base(env, LoadNameValue, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumn))
+            : base(env, LoadNameValue, options, TrainerUtils.MakeU4ScalarColumn(options.LabelColumn))
         {
             _numClass = -1;
         }
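
This one-line fix makes the multiclass trainer declare a key-typed (U4) label column instead of a boolean one, which is what the corrected baselines below reflect (`col=Label:U4[0-2]:0` in place of a text label plus a Term transform). A hedged sketch of the equivalent setup in the public ML.NET API (method names assume the current API surface, not this commit's internals):

```csharp
using Microsoft.ML;

var mlContext = new MLContext(seed: 1);

// Multiclass LightGBM expects a key-typed label, so map the raw label
// to a key first (the pipeline analogue of the Term transform).
var pipeline = mlContext.Transforms.Conversion.MapValueToKey("Label")
    .Append(mlContext.MulticlassClassification.Trainers.LightGbm(
        labelColumnName: "Label", featureColumnName: "Features"));
```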

src/Microsoft.ML.LightGBM/WrappedLightGbmBooster.cs (+4 −4)

@@ -100,12 +100,12 @@ private static double[] Str2DoubleArray(string str, char delimiter)
             var values = new List<double>();
             foreach (var token in str.Split(delimiter))
             {
-                var trimmed = token.Trim();
+                var trimmed = token.Trim().ToLowerInvariant();

-                if (trimmed.Equals("inf", StringComparison.OrdinalIgnoreCase))
-                    values.Add(double.PositiveInfinity);
-                else if (trimmed.Equals("-inf", StringComparison.OrdinalIgnoreCase))
+                if (trimmed.Contains("-inf"))
                     values.Add(double.NegativeInfinity);
+                else if (trimmed.Contains("inf"))
+                    values.Add(double.PositiveInfinity);
                 else if (trimmed.Contains("nan"))
                     values.Add(double.NaN);
                 else
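
Two things change above: tokens are lower-cased once up front, and infinity is detected with `Contains` rather than an exact `Equals`, presumably to tolerate model files where the infinity marker is embedded in a longer token. The `-inf` test must come first, because any token containing `-inf` also contains `inf`. A self-contained sketch of the corrected parser (only the special-value branches are visible in the diff; the final numeric branch is an assumption):

```csharp
using System;
using System.Collections.Generic;
using System.Globalization;

static class ParseSketch
{
    // Sketch of Str2DoubleArray after the fix. Order matters: negative
    // infinity is tested before positive infinity, as in the diff above.
    public static double[] Str2DoubleArray(string str, char delimiter)
    {
        var values = new List<double>();
        foreach (var token in str.Split(delimiter))
        {
            var trimmed = token.Trim().ToLowerInvariant();

            if (trimmed.Contains("-inf"))
                values.Add(double.NegativeInfinity);
            else if (trimmed.Contains("inf"))
                values.Add(double.PositiveInfinity);
            else if (trimmed.Contains("nan"))
                values.Add(double.NaN);
            else
                values.Add(double.Parse(trimmed, CultureInfo.InvariantCulture)); // assumed fallback
        }
        return values.ToArray();
    }
}
```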

src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs (+3 −0)

@@ -611,6 +611,9 @@ private void NormalizeSumToOne(float[] output, int count)
             for (int i = 0; i < count; i++)
             {
                 var value = output[i];
+                if (float.IsNaN(value))
+                    continue;
+
                 if (value >= 0)
                     sum += value;
                 else
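
Because LightGBM can now surface NaN scores, the one-vs-all normalization skips NaN entries so a single NaN no longer poisons the probability sum. A sketch of the patched method (the NaN guard is verbatim from the diff; the clamp-and-divide structure around it is reconstructed from the visible context lines):

```csharp
// Sketch of Ova's NormalizeSumToOne after the patch.
static void NormalizeSumToOne(float[] output, int count)
{
    double sum = 0;
    for (int i = 0; i < count; i++)
    {
        var value = output[i];
        if (float.IsNaN(value)) // new: leave NaN scores out of the sum
            continue;

        if (value >= 0)
            sum += value;
        else
            output[i] = 0; // assumed: negative scores clamped to zero
    }

    if (sum <= 0) // assumed: nothing to normalize
        return;

    for (int i = 0; i < count; i++)
        output[i] = (float)(output[i] / sum);
}
```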

test/BaselineOutput/Common/EntryPoints/core_manifest.json (+36 −0)

@@ -11606,6 +11606,15 @@
           ]
         }
       },
+      {
+        "Name": "Seed",
+        "Type": "Int",
+        "Desc": "Sets the random seed for LightGBM to use.",
+        "Required": false,
+        "SortOrder": 150.0,
+        "IsNullable": true,
+        "Default": null
+      },
       {
         "Name": "ParallelTrainer",
         "Type": {
@@ -12101,6 +12110,15 @@
           ]
         }
       },
+      {
+        "Name": "Seed",
+        "Type": "Int",
+        "Desc": "Sets the random seed for LightGBM to use.",
+        "Required": false,
+        "SortOrder": 150.0,
+        "IsNullable": true,
+        "Default": null
+      },
       {
         "Name": "ParallelTrainer",
         "Type": {
@@ -12596,6 +12614,15 @@
           ]
         }
       },
+      {
+        "Name": "Seed",
+        "Type": "Int",
+        "Desc": "Sets the random seed for LightGBM to use.",
+        "Required": false,
+        "SortOrder": 150.0,
+        "IsNullable": true,
+        "Default": null
+      },
       {
         "Name": "ParallelTrainer",
         "Type": {
@@ -13091,6 +13118,15 @@
           ]
         }
       },
+      {
+        "Name": "Seed",
+        "Type": "Int",
+        "Desc": "Sets the random seed for LightGBM to use.",
+        "Required": false,
+        "SortOrder": 150.0,
+        "IsNullable": true,
+        "Default": null
+      },
       {
         "Name": "ParallelTrainer",
         "Type": {

test/BaselineOutput/SingleDebug/LightGBMMC/LightGBMMC-CV-iris.key-out.txt renamed to test/BaselineOutput/Common/LightGBMMC/LightGBMMC-CV-iris.key-out.txt (+6 −6)

@@ -21,8 +21,8 @@ TRUTH ||========================
 Precision ||1.0000 |0.9310 |0.8966 |
 Accuracy(micro-avg): 0.936709
 Accuracy(macro-avg): 0.942857
-Log-loss: 0.312759
-Log-loss reduction: 71.240938
+Log-loss: 0.285741
+Log-loss reduction: 73.725386

 Confusion table
 ||========================
@@ -35,15 +35,15 @@ TRUTH ||========================
 Precision ||1.0000 |0.9048 |0.9524 |
 Accuracy(micro-avg): 0.957746
 Accuracy(macro-avg): 0.953030
-Log-loss: 0.193389
-Log-loss reduction: 82.186751
+Log-loss: 0.160970
+Log-loss reduction: 85.172898

 OVERALL RESULTS
 ---------------------------------------
 Accuracy(micro-avg): 0.947228 (0.0105)
 Accuracy(macro-avg): 0.947944 (0.0051)
-Log-loss: 0.253074 (0.0597)
-Log-loss reduction: 76.713844 (5.4729)
+Log-loss: 0.223355 (0.0624)
+Log-loss reduction: 79.449142 (5.7238)

 ---------------------------------------
 Physical memory usage(MB): %Number%
(file name hidden)

@@ -1,4 +1,4 @@
 LightGBMMC
 Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /iter /lr /nl /mil /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
-0.947228 0.947944 0.253074 76.71384 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label} /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
+0.947228 0.947944 0.223355 79.44914 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label} /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1

(file name hidden; new file)

@@ -0,0 +1,151 @@
+Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
+5 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
+6 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
+8 0 0 0.10661010609724078 0.8988761 0.06135031 0.0397736244 0 1 2
+9 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
+10 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+11 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+18 0 0 0.20871018055857202 0.8116304 0.150229976 0.0381395929 0 1 2
+20 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
+21 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+25 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
+28 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
+31 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+32 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+35 0 0 0.091914338968542939 0.9121833 0.0474542454 0.0403624475 0 1 2
+37 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
+40 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
+41 0 0 0.12022446935179025 0.8867214 0.0605207235 0.0527579151 0 1 2
+44 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
+45 0 0 0.10661010609724078 0.8988761 0.06135031 0.0397736244 0 1 2
+46 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+48 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
+50 1 1 0.4718017766214076 0.623877168 0.288581163 0.08754168 1 2 0
+51 1 1 0.16129621134678201 0.851039946 0.09795632 0.0510037169 1 0 2
+52 1 2 1.7576448804634039 0.735976 0.172450528 0.0915734842 2 1 0
+54 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
+56 1 1 0.58884998083853646 0.554965138 0.35958758 0.085447304 1 2 0
+60 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
+63 1 1 0.40334659068829443 0.6680805 0.285767019 0.0461524948 1 2 0
+64 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
+66 1 1 0.10523849252688122 0.9001098 0.0527786054 0.047111582 1 0 2
+68 1 1 0.11845279887246257 0.888293743 0.06337312 0.04833313 1 2 0
+69 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
+70 1 1 0.58884998083853646 0.554965138 0.35958758 0.085447304 1 2 0
+71 1 1 0.11296366633272885 0.8931831 0.0539531074 0.0528637879 1 2 0
+72 1 2 1.5315509875875275 0.7353038 0.216200083 0.048496116 2 1 0
+73 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
+74 1 1 0.10601819760601948 0.8994083 0.05323223 0.04735948 1 0 2
+76 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
+77 1 2 2.1977178827233494 0.8378547 0.111056313 0.05108899 2 1 0
+79 1 1 0.52084859644468928 0.594016254 0.3608323 0.045151446 1 0 2
+82 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
+88 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
+90 1 1 0.1129939635005666 0.893156052 0.05824627 0.0485976934 1 2 0
+91 1 1 0.40334659068829443 0.6680805 0.285767019 0.0461524948 1 2 0
+92 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
+93 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
+95 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
+96 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
+97 1 1 0.10601819760601948 0.8994083 0.05323223 0.04735948 1 0 2
+98 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
+99 1 1 0.10825296364989409 0.897400558 0.0531134 0.04948606 1 0 2
+100 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+102 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
+104 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
+105 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
+106 2 1 2.6016715940237773 0.8748904 0.07414953 0.0509601049 1 2 0
+108 2 2 0.16088853658016677 0.851386964 0.09868193 0.04993112 2 1 0
+109 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+111 2 2 0.12042729005263374 0.886541545 0.06509956 0.0483589172 2 1 0
+112 2 2 0.10352719537737497 0.9016515 0.0498134866 0.0485349931 2 1 0
+113 2 2 0.13625251837638269 0.872622252 0.0777320862 0.0496456921 2 1 0
+115 2 2 0.14817072903452888 0.8622839 0.0871827 0.0505334176 2 0 1
+117 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+120 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+121 2 2 0.13625251837638269 0.872622252 0.0777320862 0.0496456921 2 1 0
+122 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
+123 2 2 0.21410877198720812 0.8072606 0.142007053 0.050732363 2 1 0
+125 2 2 0.18232859016677222 0.8333275 0.0897813961 0.0768911242 2 0 1
+128 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
+129 2 2 0.13494969421225142 0.873759866 0.07612156 0.0501186028 2 1 0
+131 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+132 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
+133 2 2 0.31946514465077724 0.7265375 0.22336027 0.0501022264 2 1 0
+137 2 2 0.19509988206139145 0.8227525 0.08864207 0.08860543 2 0 1
+138 2 1 0.96751745636348951 0.571897149 0.3800253 0.0480775572 1 2 0
+141 2 2 0.15850114560830628 0.853422 0.08952415 0.05705389 2 0 1
+144 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
+145 2 2 0.11573805867711409 0.8907085 0.05954661 0.04974486 2 1 0
+147 2 2 0.11573805867711409 0.8907085 0.05954661 0.04974486 2 1 0
+0 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+1 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
+2 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+3 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
+4 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+7 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+12 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
+13 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
+14 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
+15 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
+16 0 0 0.097859552345142847 0.906776249 0.0505770147 0.04264675 0 1 2
+17 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+19 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+22 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+23 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+24 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+26 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+27 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+29 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+30 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
+33 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
+34 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
+36 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
+38 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
+39 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+42 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+43 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+47 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+49 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
+53 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+55 1 1 0.092892934581938133 0.911291063 0.05356518 0.035143774 1 2 0
+57 1 1 0.097438230233297438 0.9071584 0.0560606159 0.036781013 1 2 0
+58 1 1 0.10073777708552108 0.9041701 0.06115444 0.0346754827 1 2 0
+59 1 1 0.12054139057437575 0.8864404 0.07761862 0.0359409936 1 2 0
+61 1 1 0.18697080511735736 0.829467952 0.133391976 0.0371401 1 2 0
+62 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+65 1 1 0.099831826860305617 0.9049896 0.06030351 0.0347069129 1 2 0
+67 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+75 1 1 0.12559806134822368 0.8819693 0.08420664 0.03382407 1 2 0
+78 1 1 0.18500488315787203 0.8311002 0.132115185 0.0367846042 1 2 0
+80 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+81 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+83 1 2 1.0043852293668838 0.6034677 0.366269737 0.0302625634 2 1 0
+84 1 1 0.20077823464619507 0.818093836 0.1422889 0.0396172553 1 2 0
+85 1 1 0.34652926989177313 0.7071381 0.2603224 0.0325394757 1 2 0
+86 1 1 0.17271648258942382 0.8413761 0.121384442 0.03723942 1 2 0
+87 1 1 0.10970840394213197 0.8960954 0.0690198541 0.0348847173 1 2 0
+89 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+94 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
+101 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
+103 2 2 0.10737741038979709 0.8981866 0.0664602146 0.03535317 2 1 0
+107 2 2 0.10254983204222122 0.9025332 0.06078896 0.03667784 2 1 0
+110 2 2 0.22186873543222146 0.8010205 0.167042211 0.03193731 2 1 0
+114 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
+116 2 2 0.10737741038979709 0.8981866 0.0664602146 0.03535317 2 1 0
+118 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
+119 2 1 0.90674956791950001 0.558204055 0.40383473 0.03796125 1 2 0
+124 2 2 0.11442077120766009 0.8918826 0.07259029 0.0355271064 2 1 0
+126 2 2 0.40083567848156304 0.6697601 0.293806285 0.0364336222 2 1 0
+127 2 2 0.36452230942262392 0.694528341 0.2717706 0.0337010734 2 1 0
+130 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
+134 2 1 0.87385387392584235 0.537953556 0.41734007 0.04470639 1 2 0
+135 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
+136 2 2 0.12080648753504018 0.886205435 0.0794470161 0.0343475267 2 1 0
+139 2 2 0.11557084397877333 0.890857458 0.07233209 0.0368104242 2 1 0
+140 2 2 0.10625640378340648 0.899194062 0.0649875849 0.03581835 2 1 0
+142 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
+143 2 2 0.11442077120766009 0.8918826 0.07259029 0.0355271064 2 1 0
+146 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
+148 2 2 0.1306706508046877 0.877506733 0.08750639 0.034986876 2 1 0
+149 2 2 0.19668370574494914 0.8214504 0.145130783 0.03341879 2 1 0
(file name hidden)

@@ -1,4 +1,4 @@
-maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label}
+maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-4} data=%Data% seed=1
 Not adding a normalizer.
 Auto-tuning parameters: UseCat = False
 Auto-tuning parameters: UseSoftmax = False
@@ -21,8 +21,8 @@ TRUTH ||========================
 Precision ||1.0000 |0.9310 |0.8966 |
 Accuracy(micro-avg): 0.936709
 Accuracy(macro-avg): 0.942857
-Log-loss: 0.312759
-Log-loss reduction: 71.240938
+Log-loss: 0.285741
+Log-loss reduction: 73.725386

 Confusion table
 ||========================
@@ -35,33 +35,27 @@ TRUTH ||========================
 Precision ||1.0000 |0.9048 |0.9524 |
 Accuracy(micro-avg): 0.957746
 Accuracy(macro-avg): 0.953030
-Log-loss: 0.193389
-Log-loss reduction: 82.186751
+Log-loss: 0.160970
+Log-loss reduction: 85.172898

 OVERALL RESULTS
 ---------------------------------------
 Accuracy(micro-avg): 0.947228 (0.0105)
 Accuracy(macro-avg): 0.947944 (0.0051)
-Log-loss: 0.253074 (0.0597)
-Log-loss reduction: 76.713844 (5.4729)
+Log-loss: 0.223355 (0.0624)
+Log-loss reduction: 79.449142 (5.7238)

 ---------------------------------------
 Physical memory usage(MB): %Number%
 Virtual memory usage(MB): %Number%
 %DateTime% Time elapsed(s): %Number%

 --- Progress log ---
-[1] 'Building term dictionary' started.
-[1] (%Time%) 71 examples Total Terms: 3
-[1] 'Building term dictionary' finished in %Time%.
-[2] 'Loading data for LightGBM' started.
-[2] 'Loading data for LightGBM' finished in %Time%.
-[3] 'Training with LightGBM' started.
-[3] 'Training with LightGBM' finished in %Time%.
-[4] 'Building term dictionary #2' started.
-[4] (%Time%) 79 examples Total Terms: 3
-[4] 'Building term dictionary #2' finished in %Time%.
-[5] 'Loading data for LightGBM #2' started.
-[5] 'Loading data for LightGBM #2' finished in %Time%.
-[6] 'Training with LightGBM #2' started.
-[6] 'Training with LightGBM #2' finished in %Time%.
+[1] 'Loading data for LightGBM' started.
+[1] 'Loading data for LightGBM' finished in %Time%.
+[2] 'Training with LightGBM' started.
+[2] 'Training with LightGBM' finished in %Time%.
+[3] 'Loading data for LightGBM #2' started.
+[3] 'Loading data for LightGBM #2' finished in %Time%.
+[4] 'Training with LightGBM #2' started.
+[4] 'Training with LightGBM #2' finished in %Time%.
(file name hidden)

@@ -1,4 +1,4 @@
 LightGBMMC
 Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /iter /lr /nl /mil /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
-0.947228 0.947944 0.253074 76.71384 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-4]:0 col=Features:1-4} data=%Data% seed=1 /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
+0.947228 0.947944 0.223355 79.44914 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-4} data=%Data% seed=1 /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
