Updates ml.net reference of LightGBM to version 2.2 #2448

Merged · 7 commits · Feb 13, 2019
2 changes: 1 addition & 1 deletion build/Dependencies.props
@@ -14,7 +14,7 @@
   <!-- Other/Non-Core Product Dependencies -->
   <PropertyGroup>
     <GoogleProtobufPackageVersion>3.5.1</GoogleProtobufPackageVersion>
-    <LightGBMPackageVersion>2.2.1.1</LightGBMPackageVersion>
+    <LightGBMPackageVersion>2.2.3</LightGBMPackageVersion>
     <MicrosoftMLOnnxRuntimePackageVersion>0.2.1</MicrosoftMLOnnxRuntimePackageVersion>
     <MlNetMklDepsPackageVersion>0.0.0.7</MlNetMklDepsPackageVersion>
     <ParquetDotNetPackageVersion>2.1.3</ParquetDotNetPackageVersion>
5 changes: 4 additions & 1 deletion src/Microsoft.ML.LightGBM/LightGbmArguments.cs
@@ -371,6 +371,9 @@ public enum EvalMetricType
         [TlcModule.SweepableDiscreteParam("CatL2", new object[] { 0.1, 0.5, 1, 5, 10 })]
         public double CatL2 = 10;

+        [Argument(ArgumentType.AtMostOnce, HelpText = "Sets the random seed for LightGBM to use.")]
+        public int? Seed;
+
         [Argument(ArgumentType.Multiple, HelpText = "Parallel LightGBM Learning Algorithm", ShortName = "parag")]
         public ISupportParallel ParallelTrainer = new SingleTrainerFactory();

@@ -390,7 +393,7 @@ internal Dictionary<string, object> ToDictionary(IHost host)
             if (NThread.HasValue)
                 res["nthread"] = NThread.Value;

-            res["seed"] = host.Rand.Next();
+            res["seed"] = (Seed.HasValue) ? Seed : host.Rand.Next();

             string metric = null;
             switch (EvalMetric)
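Taken together, the two hunks above make LightGBM's seed reproducible on demand: a user-supplied `Seed` wins, otherwise the host RNG keeps the old randomized behavior. A minimal standalone sketch of that pattern (the class name, field layout, and local `Random` are stand-ins for the real ML.NET types, not the library's API):

```csharp
using System;
using System.Collections.Generic;

// Standalone sketch of the seed-resolution pattern added in this PR;
// everything here is a stand-in for the actual ML.NET Options/host types.
class LightGbmOptionsSketch
{
    public int? Seed;                          // null => keep the old randomized behavior

    private readonly Random _hostRand = new Random();

    public Dictionary<string, object> ToDictionary()
    {
        var res = new Dictionary<string, object>();
        // Mirrors the diff: res["seed"] = (Seed.HasValue) ? Seed : host.Rand.Next();
        res["seed"] = Seed ?? _hostRand.Next();
        return res;
    }
}
```

With `Seed = 42`, two runs hand LightGBM the same seed and should train identically; with `Seed` left null, each run draws a fresh seed exactly as before.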
2 changes: 1 addition & 1 deletion src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs
@@ -36,7 +36,7 @@ public sealed class LightGbmMulticlassTrainer : LightGbmTrainerBase<VBuffer<floa
         public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification;

         internal LightGbmMulticlassTrainer(IHostEnvironment env, Options options)
-            : base(env, LoadNameValue, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumn))
+            : base(env, LoadNameValue, options, TrainerUtils.MakeU4ScalarColumn(options.LabelColumn))
         {
             _numClass = -1;
         }
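This one-line fix explains several baseline changes further down: with the multiclass label now requested as a U4 key column rather than a bool scalar, the iris baselines can load the label directly as `col=Label:U4[0-2]:0` and drop the `xf=Term{col=Label}` text-to-key transform, which is also why the 'Building term dictionary' steps disappear from the progress log below.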
8 changes: 4 additions & 4 deletions src/Microsoft.ML.LightGBM/WrappedLightGbmBooster.cs
@@ -100,12 +100,12 @@ private static double[] Str2DoubleArray(string str, char delimiter)
             var values = new List<double>();
             foreach (var token in str.Split(delimiter))
             {
-                var trimmed = token.Trim();
+                var trimmed = token.Trim().ToLowerInvariant();

-                if (trimmed.Equals("inf", StringComparison.OrdinalIgnoreCase))
-                    values.Add(double.PositiveInfinity);
-                else if (trimmed.Equals("-inf", StringComparison.OrdinalIgnoreCase))
+                if (trimmed.Contains("-inf"))
                     values.Add(double.NegativeInfinity);
+                else if (trimmed.Contains("inf"))
+                    values.Add(double.PositiveInfinity);
                 else if (trimmed.Contains("nan"))
                     values.Add(double.NaN);
                 else
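The branch order here is load-bearing: `Contains("inf")` also matches `"-inf"`, so the negative-infinity check must run first, and lower-casing once up front is what lets the `Contains` calls drop their `StringComparison` arguments. Switching from exact `Equals` to `Contains` presumably also tolerates tokens that merely embed the marker rather than equal it. A self-contained sketch of the resulting parser (the `double.Parse` fallback is an assumption; the real method's else-branch is outside this hunk):

```csharp
using System;
using System.Collections.Generic;
using System.Globalization;

static class Str2DoubleArraySketch
{
    // Sketch of the parsing order the diff establishes. Order matters:
    // "-inf".Contains("inf") is true, so "-inf" must be tested before "inf".
    public static double[] Parse(string str, char delimiter)
    {
        var values = new List<double>();
        foreach (var token in str.Split(delimiter))
        {
            var trimmed = token.Trim().ToLowerInvariant();
            if (trimmed.Contains("-inf"))
                values.Add(double.NegativeInfinity);
            else if (trimmed.Contains("inf"))
                values.Add(double.PositiveInfinity);
            else if (trimmed.Contains("nan"))
                values.Add(double.NaN);
            else
                values.Add(double.Parse(trimmed, CultureInfo.InvariantCulture)); // assumed fallback
        }
        return values.ToArray();
    }
}
```

For example, `Str2DoubleArraySketch.Parse("0.5 -inf nan", ' ')` yields `{ 0.5, -Infinity, NaN }`; reversing the first two branches would misread `-inf` as positive infinity.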
3 changes: 3 additions & 0 deletions src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs
@@ -607,6 +607,9 @@ private void NormalizeSumToOne(float[] output, int count)
             for (int i = 0; i < count; i++)
             {
                 var value = output[i];
+                if (float.IsNaN(value))
+                    continue;
+
                 if (value >= 0)
                     sum += value;
                 else
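In isolation, the new guard keeps a single NaN class score from flowing into the sum and turning every normalized probability into NaN. A hedged sketch of the surrounding method (only the NaN guard and the visible lines come from the diff; the method tail is assumed so the sketch is complete and runnable):

```csharp
static class OvaNormalizeSketch
{
    // Hedged sketch of NormalizeSumToOne with the NaN guard from this PR.
    public static void NormalizeSumToOne(float[] output, int count)
    {
        float sum = 0;
        for (int i = 0; i < count; i++)
        {
            var value = output[i];
            if (float.IsNaN(value))
                continue;             // new guard: a NaN score no longer poisons the sum
            if (value >= 0)
                sum += value;
            else
                output[i] = 0;        // assumption: negative scores are clamped to zero
        }
        if (sum <= 0)
            return;                   // assumption: degenerate case, leave scores alone
        for (int i = 0; i < count; i++)
            output[i] /= sum;         // NaN entries stay NaN through the division
    }
}
```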
36 changes: 36 additions & 0 deletions test/BaselineOutput/Common/EntryPoints/core_manifest.json
@@ -11606,6 +11606,15 @@
     ]
   }
 },
+{
+  "Name": "Seed",
+  "Type": "Int",
+  "Desc": "Sets the random seed for LightGBM to use.",
+  "Required": false,
+  "SortOrder": 150.0,
+  "IsNullable": true,
+  "Default": null
+},
 {
   "Name": "ParallelTrainer",
   "Type": {
@@ -12101,6 +12110,15 @@
     ]
   }
 },
+{
+  "Name": "Seed",
+  "Type": "Int",
+  "Desc": "Sets the random seed for LightGBM to use.",
+  "Required": false,
+  "SortOrder": 150.0,
+  "IsNullable": true,
+  "Default": null
+},
 {
   "Name": "ParallelTrainer",
   "Type": {
@@ -12596,6 +12614,15 @@
     ]
   }
 },
+{
+  "Name": "Seed",
+  "Type": "Int",
+  "Desc": "Sets the random seed for LightGBM to use.",
+  "Required": false,
+  "SortOrder": 150.0,
+  "IsNullable": true,
+  "Default": null
+},
 {
   "Name": "ParallelTrainer",
   "Type": {
@@ -13091,6 +13118,15 @@
     ]
   }
 },
+{
+  "Name": "Seed",
+  "Type": "Int",
+  "Desc": "Sets the random seed for LightGBM to use.",
+  "Required": false,
+  "SortOrder": 150.0,
+  "IsNullable": true,
+  "Default": null
+},
 {
   "Name": "ParallelTrainer",
   "Type": {
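The same nine-line `Seed` entry appears four times because the manifest describes each LightGBM entry point separately; presumably the binary, multiclass, regression, and ranking trainers all share `LightGbmArguments` and so each picks up the new argument. The remaining hunks update the LightGBMMC cross-validation baselines on iris accordingly.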
@@ -21,8 +21,8 @@ TRUTH ||========================
 Precision ||1.0000 |0.9310 |0.8966 |
 Accuracy(micro-avg): 0.936709
 Accuracy(macro-avg): 0.942857
-Log-loss: 0.312759
-Log-loss reduction: 71.240938
+Log-loss: 0.285741
+Log-loss reduction: 73.725386

 Confusion table
 ||========================
@@ -35,15 +35,15 @@ TRUTH ||========================
 Precision ||1.0000 |0.9048 |0.9524 |
 Accuracy(micro-avg): 0.957746
 Accuracy(macro-avg): 0.953030
-Log-loss: 0.193389
-Log-loss reduction: 82.186751
+Log-loss: 0.160970
+Log-loss reduction: 85.172898

 OVERALL RESULTS
 ---------------------------------------
 Accuracy(micro-avg): 0.947228 (0.0105)
 Accuracy(macro-avg): 0.947944 (0.0051)
-Log-loss: 0.253074 (0.0597)
-Log-loss reduction: 76.713844 (5.4729)
+Log-loss: 0.223355 (0.0624)
+Log-loss reduction: 79.449142 (5.7238)

 ---------------------------------------
 Physical memory usage(MB): %Number%
@@ -1,4 +1,4 @@
 LightGBMMC
 Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /iter /lr /nl /mil /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
-0.947228 0.947944 0.253074 76.71384 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label} /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
+0.947228 0.947944 0.223355 79.44914 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label} /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1

151 changes: 151 additions & 0 deletions test/BaselineOutput/Common/LightGBMMC/LightGBMMC-CV-iris.key.txt
@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
6 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
8 0 0 0.10661010609724078 0.8988761 0.06135031 0.0397736244 0 1 2
9 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
10 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
11 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
18 0 0 0.20871018055857202 0.8116304 0.150229976 0.0381395929 0 1 2
20 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
21 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
25 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
28 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
31 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
32 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
35 0 0 0.091914338968542939 0.9121833 0.0474542454 0.0403624475 0 1 2
37 0 0 0.10718246050682641 0.898361742 0.06166233 0.0399759077 0 1 2
40 0 0 0.077036827737413466 0.925855756 0.0399397165 0.0342044979 0 2 1
41 0 0 0.12022446935179025 0.8867214 0.0605207235 0.0527579151 0 1 2
44 0 0 0.18795057786518246 0.82865566 0.13240473 0.03893963 0 1 2
45 0 0 0.10661010609724078 0.8988761 0.06135031 0.0397736244 0 1 2
46 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
48 0 0 0.077352973171323419 0.9255631 0.0400973819 0.0343395248 0 2 1
50 1 1 0.4718017766214076 0.623877168 0.288581163 0.08754168 1 2 0
51 1 1 0.16129621134678201 0.851039946 0.09795632 0.0510037169 1 0 2
52 1 2 1.7576448804634039 0.735976 0.172450528 0.0915734842 2 1 0
54 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
56 1 1 0.58884998083853646 0.554965138 0.35958758 0.085447304 1 2 0
60 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
63 1 1 0.40334659068829443 0.6680805 0.285767019 0.0461524948 1 2 0
64 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
66 1 1 0.10523849252688122 0.9001098 0.0527786054 0.047111582 1 0 2
68 1 1 0.11845279887246257 0.888293743 0.06337312 0.04833313 1 2 0
69 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
70 1 1 0.58884998083853646 0.554965138 0.35958758 0.085447304 1 2 0
71 1 1 0.11296366633272885 0.8931831 0.0539531074 0.0528637879 1 2 0
72 1 2 1.5315509875875275 0.7353038 0.216200083 0.048496116 2 1 0
73 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
74 1 1 0.10601819760601948 0.8994083 0.05323223 0.04735948 1 0 2
76 1 1 0.43228345578320954 0.6490254 0.306138515 0.0448361263 1 2 0
77 1 2 2.1977178827233494 0.8378547 0.111056313 0.05108899 2 1 0
79 1 1 0.52084859644468928 0.594016254 0.3608323 0.045151446 1 0 2
82 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
88 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
90 1 1 0.1129939635005666 0.893156052 0.05824627 0.0485976934 1 2 0
91 1 1 0.40334659068829443 0.6680805 0.285767019 0.0461524948 1 2 0
92 1 1 0.10833785101543712 0.8973244 0.0535823964 0.04909323 1 2 0
93 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
95 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
96 1 1 0.10185868247537189 0.9031572 0.0534541123 0.0433886945 1 0 2
97 1 1 0.10601819760601948 0.8994083 0.05323223 0.04735948 1 0 2
98 1 1 0.53794831506140339 0.5839451 0.3697834 0.04627151 1 0 2
99 1 1 0.10825296364989409 0.897400558 0.0531134 0.04948606 1 0 2
100 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
102 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
104 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
105 2 2 0.09360815216965325 0.9106395 0.0490188077 0.0403416753 2 0 1
106 2 1 2.6016715940237773 0.8748904 0.07414953 0.0509601049 1 2 0
108 2 2 0.16088853658016677 0.851386964 0.09868193 0.04993112 2 1 0
109 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
111 2 2 0.12042729005263374 0.886541545 0.06509956 0.0483589172 2 1 0
112 2 2 0.10352719537737497 0.9016515 0.0498134866 0.0485349931 2 1 0
113 2 2 0.13625251837638269 0.872622252 0.0777320862 0.0496456921 2 1 0
115 2 2 0.14817072903452888 0.8622839 0.0871827 0.0505334176 2 0 1
117 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
120 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
121 2 2 0.13625251837638269 0.872622252 0.0777320862 0.0496456921 2 1 0
122 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
123 2 2 0.21410877198720812 0.8072606 0.142007053 0.050732363 2 1 0
125 2 2 0.18232859016677222 0.8333275 0.0897813961 0.0768911242 2 0 1
128 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
129 2 2 0.13494969421225142 0.873759866 0.07612156 0.0501186028 2 1 0
131 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
132 2 2 0.10761945858193075 0.897969246 0.0530484878 0.0489822738 2 1 0
133 2 2 0.31946514465077724 0.7265375 0.22336027 0.0501022264 2 1 0
137 2 2 0.19509988206139145 0.8227525 0.08864207 0.08860543 2 0 1
138 2 1 0.96751745636348951 0.571897149 0.3800253 0.0480775572 1 2 0
141 2 2 0.15850114560830628 0.853422 0.08952415 0.05705389 2 0 1
144 2 2 0.13813397824344545 0.870982 0.08806214 0.04095585 2 0 1
145 2 2 0.11573805867711409 0.8907085 0.05954661 0.04974486 2 1 0
147 2 2 0.11573805867711409 0.8907085 0.05954661 0.04974486 2 1 0
0 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
1 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
2 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
3 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
4 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
7 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
12 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
13 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
14 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
15 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
16 0 0 0.097859552345142847 0.906776249 0.0505770147 0.04264675 0 1 2
17 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
19 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
22 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
23 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
24 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
26 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
27 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
29 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
30 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
33 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
34 0 0 0.10497272356713119 0.9003491 0.0541080274 0.0455429144 0 1 2
36 0 0 0.12145176975637179 0.885633767 0.07271381 0.0416524 0 1 2
38 0 0 0.10704420050868005 0.898485959 0.05399606 0.0475179851 0 1 2
39 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
42 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
43 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
47 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
49 0 0 0.10107568410061668 0.9038646 0.0504146144 0.0457207449 0 1 2
53 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
55 1 1 0.092892934581938133 0.911291063 0.05356518 0.035143774 1 2 0
57 1 1 0.097438230233297438 0.9071584 0.0560606159 0.036781013 1 2 0
58 1 1 0.10073777708552108 0.9041701 0.06115444 0.0346754827 1 2 0
59 1 1 0.12054139057437575 0.8864404 0.07761862 0.0359409936 1 2 0
61 1 1 0.18697080511735736 0.829467952 0.133391976 0.0371401 1 2 0
62 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
65 1 1 0.099831826860305617 0.9049896 0.06030351 0.0347069129 1 2 0
67 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
75 1 1 0.12559806134822368 0.8819693 0.08420664 0.03382407 1 2 0
78 1 1 0.18500488315787203 0.8311002 0.132115185 0.0367846042 1 2 0
80 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
81 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
83 1 2 1.0043852293668838 0.6034677 0.366269737 0.0302625634 2 1 0
84 1 1 0.20077823464619507 0.818093836 0.1422889 0.0396172553 1 2 0
85 1 1 0.34652926989177313 0.7071381 0.2603224 0.0325394757 1 2 0
86 1 1 0.17271648258942382 0.8413761 0.121384442 0.03723942 1 2 0
87 1 1 0.10970840394213197 0.8960954 0.0690198541 0.0348847173 1 2 0
89 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
94 1 1 0.094191382978362242 0.910108566 0.0542792119 0.0356122442 1 2 0
101 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
103 2 2 0.10737741038979709 0.8981866 0.0664602146 0.03535317 2 1 0
107 2 2 0.10254983204222122 0.9025332 0.06078896 0.03667784 2 1 0
110 2 2 0.22186873543222146 0.8010205 0.167042211 0.03193731 2 1 0
114 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
116 2 2 0.10737741038979709 0.8981866 0.0664602146 0.03535317 2 1 0
118 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
119 2 1 0.90674956791950001 0.558204055 0.40383473 0.03796125 1 2 0
124 2 2 0.11442077120766009 0.8918826 0.07259029 0.0355271064 2 1 0
126 2 2 0.40083567848156304 0.6697601 0.293806285 0.0364336222 2 1 0
127 2 2 0.36452230942262392 0.694528341 0.2717706 0.0337010734 2 1 0
130 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
134 2 1 0.87385387392584235 0.537953556 0.41734007 0.04470639 1 2 0
135 2 2 0.085412022670855015 0.9181339 0.0462341458 0.0356319249 2 1 0
136 2 2 0.12080648753504018 0.886205435 0.0794470161 0.0343475267 2 1 0
139 2 2 0.11557084397877333 0.890857458 0.07233209 0.0368104242 2 1 0
140 2 2 0.10625640378340648 0.899194062 0.0649875849 0.03581835 2 1 0
142 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
143 2 2 0.11442077120766009 0.8918826 0.07259029 0.0355271064 2 1 0
146 2 2 0.16029335824881216 0.851893842 0.115021013 0.0330851451 2 1 0
148 2 2 0.1306706508046877 0.877506733 0.08750639 0.034986876 2 1 0
149 2 2 0.19668370574494914 0.8214504 0.145130783 0.03341879 2 1 0
@@ -1,4 +1,4 @@
-maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:TX:0 col=Features:1-*} data=%Data% seed=1 xf=Term{col=Label}
+maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-4} data=%Data% seed=1
 Not adding a normalizer.
 Auto-tuning parameters: UseCat = False
 Auto-tuning parameters: UseSoftmax = False
@@ -21,8 +21,8 @@ TRUTH ||========================
 Precision ||1.0000 |0.9310 |0.8966 |
 Accuracy(micro-avg): 0.936709
 Accuracy(macro-avg): 0.942857
-Log-loss: 0.312759
-Log-loss reduction: 71.240938
+Log-loss: 0.285741
+Log-loss reduction: 73.725386

 Confusion table
 ||========================
@@ -35,33 +35,27 @@ TRUTH ||========================
 Precision ||1.0000 |0.9048 |0.9524 |
 Accuracy(micro-avg): 0.957746
 Accuracy(macro-avg): 0.953030
-Log-loss: 0.193389
-Log-loss reduction: 82.186751
+Log-loss: 0.160970
+Log-loss reduction: 85.172898

 OVERALL RESULTS
 ---------------------------------------
 Accuracy(micro-avg): 0.947228 (0.0105)
 Accuracy(macro-avg): 0.947944 (0.0051)
-Log-loss: 0.253074 (0.0597)
-Log-loss reduction: 76.713844 (5.4729)
+Log-loss: 0.223355 (0.0624)
+Log-loss reduction: 79.449142 (5.7238)

 ---------------------------------------
 Physical memory usage(MB): %Number%
 Virtual memory usage(MB): %Number%
 %DateTime% Time elapsed(s): %Number%

 --- Progress log ---
-[1] 'Building term dictionary' started.
-[1] (%Time%) 71 examples Total Terms: 3
-[1] 'Building term dictionary' finished in %Time%.
-[2] 'Loading data for LightGBM' started.
-[2] 'Loading data for LightGBM' finished in %Time%.
-[3] 'Training with LightGBM' started.
-[3] 'Training with LightGBM' finished in %Time%.
-[4] 'Building term dictionary #2' started.
-[4] (%Time%) 79 examples Total Terms: 3
-[4] 'Building term dictionary #2' finished in %Time%.
-[5] 'Loading data for LightGBM #2' started.
-[5] 'Loading data for LightGBM #2' finished in %Time%.
-[6] 'Training with LightGBM #2' started.
-[6] 'Training with LightGBM #2' finished in %Time%.
+[1] 'Loading data for LightGBM' started.
+[1] 'Loading data for LightGBM' finished in %Time%.
+[2] 'Training with LightGBM' started.
+[2] 'Training with LightGBM' finished in %Time%.
+[3] 'Loading data for LightGBM #2' started.
+[3] 'Loading data for LightGBM #2' finished in %Time%.
+[4] 'Training with LightGBM #2' started.
+[4] 'Training with LightGBM #2' finished in %Time%.
@@ -1,4 +1,4 @@
 LightGBMMC
 Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /iter /lr /nl /mil /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
-0.947228 0.947944 0.253074 76.71384 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-4]:0 col=Features:1-4} data=%Data% seed=1 /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
+0.947228 0.947944 0.223355 79.44914 10 0.2 20 10 1 LightGBMMC %Data% %Output% 99 0 0 maml.exe CV tr=LightGBMMC{nt=1 iter=10 v=- lr=0.2 mil=10 nl=20} threads=- dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-4} data=%Data% seed=1 /iter:10;/lr:0.2;/nl:20;/mil:10;/nt:1
