/*
Copyright 2021 Cortex Labs, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package dequeuer

import (
	"net/http/httptest"
	"strings"
	"testing"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/require"
)

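// TestNewAsyncPrometheusStatsReporter verifies that the reporter returned by
// NewAsyncPrometheusStatsReporter exposes the async latency and request count
// metrics on its /metrics handler after a request event is handled.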
func TestNewAsyncPrometheusStatsReporter(t *testing.T) {
	t.Parallel()

	statsReporter := NewAsyncPrometheusStatsReporter()

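	// Report a single successful request with a 100ms latency.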
	statsReporter.HandleEvent(
		RequestEvent{
			StatusCode: 200,
			Duration:   100 * time.Millisecond,
		},
	)

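	// Scrape the reporter's /metrics handler and check that both async metrics are exposed.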
	w := httptest.NewRecorder()
	r := httptest.NewRequest("GET", "/metrics", nil)
	statsReporter.ServeHTTP(w, r)

	result := w.Body.String()
	require.Contains(t, result, "cortex_async_latency")
	require.Contains(t, result, "cortex_async_request_count")
}

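// TestAsyncStatsReporter_HandleEvent wires an AsyncStatsReporter to a dedicated
// registry and asserts the exact counter value and histogram series produced by
// a single request event.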
func TestAsyncStatsReporter_HandleEvent(t *testing.T) {
	t.Parallel()

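	// Build the reporter by hand against a fresh registry so its collectors can be inspected in isolation.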
	reg := prometheus.NewRegistry()

	latenciesHist := promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
		Name: "cortex_async_latency",
		Help: "Histogram of the latencies for an AsyncAPI kind in seconds",
	}, []string{"status_code"})

	requestCounter := promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
		Name: "cortex_async_request_count",
		Help: "Request count for an AsyncAPI",
	}, []string{"status_code"})

	statsReporter := AsyncStatsReporter{
		latencies:    latenciesHist,
		requestCount: requestCounter,
	}

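	// Report one 200 response with a 100ms latency.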
	statsReporter.HandleEvent(
		RequestEvent{
			StatusCode: 200,
			Duration:   100 * time.Millisecond,
		},
	)

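	// With the default histogram buckets, the 0.1s observation lands in the 0.1 bucket and every larger one.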
	expectedHist := `
# HELP cortex_async_latency Histogram of the latencies for an AsyncAPI kind in seconds
# TYPE cortex_async_latency histogram
cortex_async_latency_bucket{status_code="200",le="0.005"} 0
cortex_async_latency_bucket{status_code="200",le="0.01"} 0
cortex_async_latency_bucket{status_code="200",le="0.025"} 0
cortex_async_latency_bucket{status_code="200",le="0.05"} 0
cortex_async_latency_bucket{status_code="200",le="0.1"} 1
cortex_async_latency_bucket{status_code="200",le="0.25"} 1
cortex_async_latency_bucket{status_code="200",le="0.5"} 1
cortex_async_latency_bucket{status_code="200",le="1"} 1
cortex_async_latency_bucket{status_code="200",le="2.5"} 1
cortex_async_latency_bucket{status_code="200",le="5"} 1
cortex_async_latency_bucket{status_code="200",le="10"} 1
cortex_async_latency_bucket{status_code="200",le="+Inf"} 1
cortex_async_latency_sum{status_code="200"} 0.1
cortex_async_latency_count{status_code="200"} 1
`

	require.Equal(t, float64(1), testutil.ToFloat64(statsReporter.requestCount))
	require.NoError(t, testutil.CollectAndCompare(statsReporter.latencies, strings.NewReader(expectedHist)))
}