Skip to content

Commit c8e5b3b

Browse files
authored
Update tests for datastream collector (#791)
- Remove up, totalScrapes, and jsonParseFailures metrics. They are not useful.
- Move fixtures to individual files.
- Base tests on the metric output for better testing the expected output instead of the internals.

Signed-off-by: Joe Adams <[email protected]>
1 parent a09cf6d commit c8e5b3b

File tree

3 files changed

+74
-59
lines changed

3 files changed

+74
-59
lines changed

collector/data_stream.go

-30
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,6 @@ type DataStream struct {
4646
client *http.Client
4747
url *url.URL
4848

49-
up prometheus.Gauge
50-
totalScrapes, jsonParseFailures prometheus.Counter
51-
5249
dataStreamMetrics []*dataStreamMetric
5350
}
5451

@@ -59,18 +56,6 @@ func NewDataStream(logger log.Logger, client *http.Client, url *url.URL) *DataSt
5956
client: client,
6057
url: url,
6158

62-
up: prometheus.NewGauge(prometheus.GaugeOpts{
63-
Name: prometheus.BuildFQName(namespace, "data_stream_stats", "up"),
64-
Help: "Was the last scrape of the ElasticSearch Data Stream stats endpoint successful.",
65-
}),
66-
totalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
67-
Name: prometheus.BuildFQName(namespace, "data_stream_stats", "total_scrapes"),
68-
Help: "Current total ElasticSearch Data STream scrapes.",
69-
}),
70-
jsonParseFailures: prometheus.NewCounter(prometheus.CounterOpts{
71-
Name: prometheus.BuildFQName(namespace, "data_stream_stats", "json_parse_failures"),
72-
Help: "Number of errors while parsing JSON.",
73-
}),
7459
dataStreamMetrics: []*dataStreamMetric{
7560
{
7661
Type: prometheus.CounterValue,
@@ -105,10 +90,6 @@ func (ds *DataStream) Describe(ch chan<- *prometheus.Desc) {
10590
for _, metric := range ds.dataStreamMetrics {
10691
ch <- metric.Desc
10792
}
108-
109-
ch <- ds.up.Desc()
110-
ch <- ds.totalScrapes.Desc()
111-
ch <- ds.jsonParseFailures.Desc()
11293
}
11394

11495
func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse, error) {
@@ -138,12 +119,10 @@ func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse,
138119

139120
bts, err := io.ReadAll(res.Body)
140121
if err != nil {
141-
ds.jsonParseFailures.Inc()
142122
return dsr, err
143123
}
144124

145125
if err := json.Unmarshal(bts, &dsr); err != nil {
146-
ds.jsonParseFailures.Inc()
147126
return dsr, err
148127
}
149128

@@ -152,25 +131,16 @@ func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse,
152131

153132
// Collect gets DataStream metric values
154133
func (ds *DataStream) Collect(ch chan<- prometheus.Metric) {
155-
ds.totalScrapes.Inc()
156-
defer func() {
157-
ch <- ds.up
158-
ch <- ds.totalScrapes
159-
ch <- ds.jsonParseFailures
160-
}()
161134

162135
dataStreamStatsResp, err := ds.fetchAndDecodeDataStreamStats()
163136
if err != nil {
164-
ds.up.Set(0)
165137
level.Warn(ds.logger).Log(
166138
"msg", "failed to fetch and decode data stream stats",
167139
"err", err,
168140
)
169141
return
170142
}
171143

172-
ds.up.Set(1)
173-
174144
for _, metric := range ds.dataStreamMetrics {
175145
for _, dataStream := range dataStreamStatsResp.DataStreamStats {
176146
fmt.Printf("Metric: %+v", dataStream)

collector/data_stream_test.go

+50-29
Original file line numberDiff line numberDiff line change
@@ -14,44 +14,65 @@
1414
package collector
1515

1616
import (
17-
"fmt"
17+
"io"
1818
"net/http"
1919
"net/http/httptest"
2020
"net/url"
21+
"os"
22+
"strings"
2123
"testing"
2224

2325
"github.com/go-kit/log"
26+
"github.com/prometheus/client_golang/prometheus/testutil"
2427
)
2528

2629
func TestDataStream(t *testing.T) {
27-
tcs := map[string]string{
28-
"7.15.0": `{"_shards":{"total":30,"successful":30,"failed":0},"data_stream_count":2,"backing_indices":7,"total_store_size_bytes":1103028116,"data_streams":[{"data_stream":"foo","backing_indices":5,"store_size_bytes":429205396,"maximum_timestamp":1656079894000},{"data_stream":"bar","backing_indices":2,"store_size_bytes":673822720,"maximum_timestamp":1656028796000}]}`,
29-
}
30-
for ver, out := range tcs {
31-
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
32-
fmt.Fprintln(w, out)
33-
}))
34-
defer ts.Close()
35-
36-
u, err := url.Parse(ts.URL)
37-
if err != nil {
38-
t.Fatalf("Failed to parse URL: %s", err)
39-
}
40-
s := NewDataStream(log.NewNopLogger(), http.DefaultClient, u)
41-
stats, err := s.fetchAndDecodeDataStreamStats()
42-
if err != nil {
43-
t.Fatalf("Failed to fetch or decode data stream stats: %s", err)
44-
}
45-
t.Logf("[%s] Data Stream Response: %+v", ver, stats)
46-
dataStreamStats := stats.DataStreamStats[0]
47-
48-
if dataStreamStats.BackingIndices != 5 {
49-
t.Errorf("Bad number of backing indices")
50-
}
51-
52-
if dataStreamStats.StoreSizeBytes != 429205396 {
53-
t.Errorf("Bad store size bytes valuee")
54-
}
30+
31+
tests := []struct {
32+
name string
33+
file string
34+
want string
35+
}{
36+
{
37+
name: "7.15.0",
38+
file: "../fixtures/datastream/7.15.0.json",
39+
want: `# HELP elasticsearch_data_stream_backing_indices_total Number of backing indices
40+
# TYPE elasticsearch_data_stream_backing_indices_total counter
41+
elasticsearch_data_stream_backing_indices_total{data_stream="bar"} 2
42+
elasticsearch_data_stream_backing_indices_total{data_stream="foo"} 5
43+
# HELP elasticsearch_data_stream_store_size_bytes Store size of data stream
44+
# TYPE elasticsearch_data_stream_store_size_bytes counter
45+
elasticsearch_data_stream_store_size_bytes{data_stream="bar"} 6.7382272e+08
46+
elasticsearch_data_stream_store_size_bytes{data_stream="foo"} 4.29205396e+08
47+
`,
48+
},
5549
}
50+
for _, tt := range tests {
51+
t.Run(tt.name, func(t *testing.T) {
52+
f, err := os.Open(tt.file)
53+
if err != nil {
54+
t.Fatal(err)
55+
}
56+
defer f.Close()
57+
58+
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
59+
io.Copy(w, f)
60+
}))
61+
defer ts.Close()
5662

63+
u, err := url.Parse(ts.URL)
64+
if err != nil {
65+
t.Fatal(err)
66+
}
67+
68+
c := NewDataStream(log.NewNopLogger(), http.DefaultClient, u)
69+
if err != nil {
70+
t.Fatal(err)
71+
}
72+
73+
if err := testutil.CollectAndCompare(c, strings.NewReader(tt.want)); err != nil {
74+
t.Fatal(err)
75+
}
76+
})
77+
}
5778
}

fixtures/datastream/7.15.0.json

+24
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
{
2+
"_shards": {
3+
"total": 30,
4+
"successful": 30,
5+
"failed": 0
6+
},
7+
"data_stream_count": 2,
8+
"backing_indices": 7,
9+
"total_store_size_bytes": 1103028116,
10+
"data_streams": [
11+
{
12+
"data_stream": "foo",
13+
"backing_indices": 5,
14+
"store_size_bytes": 429205396,
15+
"maximum_timestamp": 1656079894000
16+
},
17+
{
18+
"data_stream": "bar",
19+
"backing_indices": 2,
20+
"store_size_bytes": 673822720,
21+
"maximum_timestamp": 1656028796000
22+
}
23+
]
24+
}

0 commit comments

Comments (0)