Skip to content

[ML] Fix possible cause for "Bad variance scale nan" log errors #1225

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
May 11, 2020
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/CHANGELOG.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@
{ml-pull}1197[#1197].)
* Improve the initialization of seasonal components for anomaly detection. (See
{ml-pull}1201[#1201], issue: {ml-issue}1178[#1178].)
* Fix possible root cause for "Bad variance scale nan" log errors. (See {ml-pull}1225[#1225].)

== {es} version 7.7.1

Expand Down
29 changes: 20 additions & 9 deletions lib/maths/CAdaptiveBucketing.cc
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ bool CAdaptiveBucketing::initialize(double a, double b, std::size_t n) {
void CAdaptiveBucketing::initialValues(core_t::TTime start,
core_t::TTime end,
const TFloatMeanAccumulatorVec& values) {
if (!this->initialized()) {
if (this->initialized() == false) {
return;
}

Expand Down Expand Up @@ -406,7 +406,7 @@ void CAdaptiveBucketing::refine(core_t::TTime time) {
LOG_TRACE(<< "totalAveragingError = " << totalAveragingError);

double n_{static_cast<double>(n)};
double step{(1 - n_ * EPS) * totalAveragingError / n_};
double step{(1.0 - n_ * EPS) * totalAveragingError / n_};
TFloatVec endpoints{m_Endpoints};
LOG_TRACE(<< "step = " << step);

Expand Down Expand Up @@ -505,7 +505,7 @@ bool CAdaptiveBucketing::knots(core_t::TTime time,
double a{m_Endpoints[i]};
double b{m_Endpoints[i + 1]};
double c{m_Centres[i]};
double c0{c};
double c0{c - m_Endpoints[0]};
knots.push_back(m_Endpoints[0]);
values.push_back(this->predict(i, time, c));
variances.push_back(this->variance(i));
Expand Down Expand Up @@ -549,26 +549,37 @@ bool CAdaptiveBucketing::knots(core_t::TTime time,
double alpha{m_Endpoints[n] - m_Centres[j]};
double beta{c0};
double Z{alpha + beta};
if (Z == 0.0) {
alpha = beta = 0.5;
} else {
alpha /= Z;
beta /= Z;
}
double lastPeriodValue{
this->predict(j, time, m_Centres[j] - m_Endpoints[n])};
double lastPeriodVariance{this->variance(j)};
knots[0] = m_Endpoints[0];
values[0] = (alpha * values[0] + beta * lastPeriodValue) / Z;
variances[0] = (alpha * variances[0] + beta * lastPeriodVariance) / Z;
values[0] = alpha * values[0] + beta * lastPeriodValue;
variances[0] = alpha * variances[0] + beta * lastPeriodVariance;
break;
}
}
for (std::size_t j = 0u; j < n; ++j) {
for (std::size_t j = 0; j < n; ++j) {
if (this->bucketCount(j) > 0.0) {
double alpha{m_Centres[j]};
double beta{m_Endpoints[n] - knots.back()};
double Z{alpha + beta};
if (Z == 0.0) {
alpha = beta = 0.5;
} else {
alpha /= Z;
beta /= Z;
}
double nextPeriodValue{
this->predict(j, time, m_Endpoints[n] + m_Centres[j])};
double nextPeriodVariance{this->variance(j)};
values.push_back((alpha * values.back() + beta * nextPeriodValue) / Z);
variances.push_back(
(alpha * variances.back() + beta * nextPeriodVariance) / Z);
values.push_back(alpha * values.back() + beta * nextPeriodValue);
variances.push_back(alpha * variances.back() + beta * nextPeriodVariance);
knots.push_back(m_Endpoints[n]);
break;
}
Expand Down
4 changes: 3 additions & 1 deletion lib/maths/CCalendarComponentAdaptiveBucketing.cc
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,9 @@ void CCalendarComponentAdaptiveBucketing::refresh(const TFloatVec& oldEndpoints)
static_cast<double>(oldCentres[l - 1]));
largeErrorCount += w * oldLargeErrorCounts[l - 1];
count += w * w * CBasicStatistics::count(m_Values[l - 1]);
double scale{count / CBasicStatistics::count(value)};
double scale{count == CBasicStatistics::count(value)
? 1.0
: count / CBasicStatistics::count(value)};
newValues.push_back(CBasicStatistics::scaled(value, scale));
newCentres.push_back(CTools::truncate(CBasicStatistics::mean(centre), yl, yr));
newLargeErrorCounts.push_back(largeErrorCount);
Expand Down
2 changes: 1 addition & 1 deletion lib/maths/CTimeSeriesDecomposition.cc
Original file line number Diff line number Diff line change
Expand Up @@ -417,7 +417,7 @@ TDoubleDoublePr CTimeSeriesDecomposition::scale(core_t::TTime time,
}

double mean{this->meanVariance()};
if (mean == 0.0) {
if (mean == 0.0 || variance == 0.0) {
return {1.0, 1.0};
}

Expand Down