Commit dfca9eb

[ML] Change logging level used in NLP request processing (#98497)
1 parent 81a4d89 commit dfca9eb

2 files changed: +7, -6 lines changed

x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/InferencePyTorchAction.java

Lines changed: 3 additions & 2 deletions
@@ -90,7 +90,7 @@ protected void doRun() throws Exception {
             NlpConfig nlpConfig = (NlpConfig) config;
             NlpTask.Request request = processor.getRequestBuilder(nlpConfig)
                 .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate(), nlpConfig.getTokenization().getSpan());
-            logger.trace(() -> format("handling request [%s]", requestIdStr));
+            logger.debug(() -> format("handling request [%s]", requestIdStr));

             // Tokenization is non-trivial, so check for cancellation one last time before sending request to the native process
             if (isCancelled()) {
@@ -110,6 +110,7 @@ protected void doRun() throws Exception {
             logger.error(() -> "[" + getDeploymentId() + "] error writing to inference process", e);
             onFailure(ExceptionsHelper.serverError("Error writing to inference process", e));
         } catch (Exception e) {
+            logger.error(() -> "[" + getDeploymentId() + "] error running inference", e);
             onFailure(e);
         }
     }
@@ -141,7 +142,7 @@ private void processResult(
             return;
         }
         InferenceResults results = inferenceResultsProcessor.processResult(tokenization, pyTorchResult.inferenceResult());
-        logger.trace(() -> format("[%s] processed result for request [%s]", getDeploymentId(), getRequestId()));
+        logger.debug(() -> format("[%s] processed result for request [%s]", getDeploymentId(), getRequestId()));
         onSuccess(results);
     }
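Note on the pattern in these hunks: both the old trace call and the new debug call pass a message supplier rather than a pre-built string, so the message is only formatted when the target level is actually enabled for the logger. A minimal standalone sketch of that idea, assuming plain Log4j2 and using String.format in place of the project's format(...) helper (this is illustrative, not the Elasticsearch code):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LazyLoggingSketch {
    private static final Logger logger = LogManager.getLogger(LazyLoggingSketch.class);

    public static void main(String[] args) {
        String requestIdStr = "42"; // hypothetical request id for the example

        // The Supplier form defers building the message until Log4j2 has checked
        // that DEBUG is enabled for this logger; if it is not, the only cost is
        // the level check, not the string formatting.
        logger.debug(() -> String.format("handling request [%s]", requestIdStr));
    }
}

Because the formatting stays cheap when the level is disabled, raising these messages from trace to debug does not add work on the hot path unless debug logging is switched on for these loggers.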

x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchResultProcessor.java

Lines changed: 4 additions & 4 deletions
@@ -149,7 +149,7 @@ void processInferenceResult(PyTorchResult result) {
             timeMs = 0L;
         }

-        logger.trace(() -> format("[%s] Parsed inference result with id [%s]", modelId, result.requestId()));
+        logger.debug(() -> format("[%s] Parsed inference result with id [%s]", modelId, result.requestId()));
         updateStats(timeMs, Boolean.TRUE.equals(result.isCacheHit()));
         PendingResult pendingResult = pendingResults.remove(result.requestId());
         if (pendingResult == null) {
@@ -163,7 +163,7 @@ void processThreadSettings(PyTorchResult result) {
         ThreadSettings threadSettings = result.threadSettings();
         assert threadSettings != null;

-        logger.trace(() -> format("[%s] Parsed thread settings result with id [%s]", modelId, result.requestId()));
+        logger.debug(() -> format("[%s] Parsed thread settings result with id [%s]", modelId, result.requestId()));
         PendingResult pendingResult = pendingResults.remove(result.requestId());
         if (pendingResult == null) {
             logger.debug(() -> format("[%s] no pending result for thread settings [%s]", modelId, result.requestId()));
@@ -176,7 +176,7 @@ void processAcknowledgement(PyTorchResult result) {
         AckResult ack = result.ackResult();
         assert ack != null;

-        logger.trace(() -> format("[%s] Parsed ack result with id [%s]", modelId, result.requestId()));
+        logger.debug(() -> format("[%s] Parsed ack result with id [%s]", modelId, result.requestId()));
         PendingResult pendingResult = pendingResults.remove(result.requestId());
         if (pendingResult == null) {
             logger.debug(() -> format("[%s] no pending result for ack [%s]", modelId, result.requestId()));
@@ -194,7 +194,7 @@ void processErrorResult(PyTorchResult result) {
             errorCount++;
         }

-        logger.trace(() -> format("[%s] Parsed error with id [%s]", modelId, result.requestId()));
+        logger.debug(() -> format("[%s] Parsed error with id [%s]", modelId, result.requestId()));
         PendingResult pendingResult = pendingResults.remove(result.requestId());
         if (pendingResult == null) {
             logger.debug(() -> format("[%s] no pending result for error [%s]", modelId, result.requestId()));
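For context on the PyTorchResultProcessor hunks: each handler parses one kind of result, logs it (now at debug), then removes the matching pending entry by request id, logging again at debug when nothing is waiting for that id. A simplified, self-contained sketch of that dispatch shape, assuming plain Log4j2; the class, PendingResult, and field names here are hypothetical, not the real Elasticsearch types:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Illustrative sketch only: parse a result, then hand it to whoever registered for it.
class ResultDispatchSketch {

    // A caller registers a future under its request id and waits on it.
    record PendingResult(CompletableFuture<String> listener) {}

    private static final Logger logger = LogManager.getLogger(ResultDispatchSketch.class);

    private final String modelId = "my-model"; // hypothetical model id
    private final ConcurrentMap<String, PendingResult> pendingResults = new ConcurrentHashMap<>();

    void register(String requestId, CompletableFuture<String> listener) {
        pendingResults.put(requestId, new PendingResult(listener));
    }

    void processInferenceResult(String requestId, String parsedResult) {
        // Logged lazily at debug, mirroring the level change in this commit.
        logger.debug(() -> String.format("[%s] Parsed inference result with id [%s]", modelId, requestId));
        PendingResult pendingResult = pendingResults.remove(requestId);
        if (pendingResult == null) {
            // Nothing is waiting for this id, e.g. the request timed out or was cancelled.
            logger.debug(() -> String.format("[%s] no pending result for request [%s]", modelId, requestId));
            return;
        }
        pendingResult.listener().complete(parsedResult);
    }
}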
