From de084ae0e7adeafbcc8fcac30b9bf3fdf10481ff Mon Sep 17 00:00:00 2001 From: Linas Medziunas Date: Tue, 18 Nov 2025 12:06:32 +0200 Subject: [PATCH 001/165] [PromQL] Improve BenchmarkJoinQuery Signed-off-by: Linas Medziunas --- promql/bench_test.go | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/promql/bench_test.go b/promql/bench_test.go index 37c8311305..a15f19e17e 100644 --- a/promql/bench_test.go +++ b/promql/bench_test.go @@ -393,40 +393,44 @@ func BenchmarkJoinQuery(b *testing.B) { } engine := promqltest.NewTestEngineWithOpts(b, opts) - const interval = 10000 // 10s interval. + const ( + interval = 10000 // 10s interval. + steps = 5000 + numInstances = 1000 + ) - // A day of data plus 10k steps. - numIntervals := 8640 + 10000 + // A day of data plus steps. + numIntervals := 8640 + steps - require.NoError(b, setupJoinQueryTestData(stor, engine, interval, numIntervals, 1000)) + require.NoError(b, setupJoinQueryTestData(stor, engine, interval, numIntervals, numInstances)) for _, c := range []benchCase{ { expr: `rpc_request_success_total + rpc_request_error_total`, - steps: 10000, + steps: steps, }, { expr: `rpc_request_success_total + ON (job, instance) GROUP_LEFT rpc_request_error_total`, - steps: 10000, + steps: steps, }, { expr: `rpc_request_success_total AND rpc_request_error_total{instance=~"0.*"}`, // 0.* keeps 1/16 of UUID values - steps: 10000, + steps: steps, }, { expr: `rpc_request_success_total OR rpc_request_error_total{instance=~"0.*"}`, // 0.* keeps 1/16 of UUID values - steps: 10000, + steps: steps, }, { expr: `rpc_request_success_total UNLESS rpc_request_error_total{instance=~"0.*"}`, // 0.* keeps 1/16 of UUID values - steps: 10000, + steps: steps, }, } { name := fmt.Sprintf("expr=%s/steps=%d", c.expr, c.steps) b.Run(name, func(b *testing.B) { ctx := context.Background() - b.ReportAllocs() - for b.Loop() { + + queryFn := func() { qry, err := engine.NewRangeQuery( ctx, stor, nil, c.expr, 
timestamp.Time(int64((numIntervals-c.steps)*10_000)), @@ -439,6 +443,14 @@ func BenchmarkJoinQuery(b *testing.B) { qry.Close() } + + queryFn() // Warm up run. + + b.ResetTimer() + b.ReportAllocs() + for b.Loop() { + queryFn() + } }) } } From a1a3114a27e4ffab9f9c450395c6dd8a964530d3 Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Tue, 25 Nov 2025 14:44:31 +0100 Subject: [PATCH 002/165] Hide alert annotations by default See https://github.com/prometheus/prometheus/issues/16911 This will create a denser layout by default, enabling people to see more information on the page without having to discover the global settings menu. Signed-off-by: Julius Volz --- web/ui/mantine-ui/src/state/settingsSlice.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/ui/mantine-ui/src/state/settingsSlice.ts b/web/ui/mantine-ui/src/state/settingsSlice.ts index 8b4a33bf76..a3e133380a 100644 --- a/web/ui/mantine-ui/src/state/settingsSlice.ts +++ b/web/ui/mantine-ui/src/state/settingsSlice.ts @@ -102,7 +102,7 @@ export const initialState: Settings = { ), showAnnotations: initializeFromLocalStorage( localStorageKeyShowAnnotations, - true + false ), showQueryWarnings: initializeFromLocalStorage( localStorageKeyShowQueryWarnings, From e894d7a271f85ee0be0d7442f6dcd0b0ca208acb Mon Sep 17 00:00:00 2001 From: aviralgarg05 Date: Sat, 29 Nov 2025 17:15:59 +0530 Subject: [PATCH 003/165] promqltest: Add optional counter reset hint comparison for native histograms This commit implements counter reset hint comparison in the promqltest framework to address issue #17615. Previously, while test definitions could specify a counter_reset_hint in expected native histogram results, the framework did not actually compare this hint between expected and actual results. 
The implementation adds optional comparison logic to the compareNativeHistogram function: - If the expected histogram has UnknownCounterReset (the default), the hint is not compared (meaning "don't care") - If the expected histogram explicitly specifies CounterReset, NotCounterReset, or GaugeType, it is verified against the actual histogram's hint This allows tests to verify that PromQL functions correctly set or preserve counter reset hints while maintaining backward compatibility with existing tests that don't specify explicit hints. Fixes #17615 Signed-off-by: aviralgarg05 --- promql/promqltest/test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/promql/promqltest/test.go b/promql/promqltest/test.go index b16433c14e..d1702ba61b 100644 --- a/promql/promqltest/test.go +++ b/promql/promqltest/test.go @@ -1163,6 +1163,14 @@ func compareNativeHistogram(exp, cur *histogram.FloatHistogram) bool { return false } + // Compare CounterResetHint only if explicitly specified in expected histogram. + // UnknownCounterReset (the default) means "don't care about the hint". + if exp.CounterResetHint != histogram.UnknownCounterReset { + if exp.CounterResetHint != cur.CounterResetHint { + return false + } + } + return true } From 488466246fccfa9b8c0c1454489726cb1f87c86a Mon Sep 17 00:00:00 2001 From: aviralgarg05 Date: Sun, 30 Nov 2025 18:01:51 +0530 Subject: [PATCH 004/165] promqltest: Fix test expectation for counter reset hint comparison The test at line 1283 for avg_over_time(nhcb_metric[13m]) incorrectly expected counter_reset_hint:gauge in the result. However, the actual avg_over_time implementation does not explicitly set the CounterResetHint to GaugeType on its output histogram. With the new counter reset hint comparison logic added to the promqltest framework (which compares hints when explicitly specified in expected results), this incorrect expectation was now being caught. 
This fix removes the incorrect counter_reset_hint:gauge from the expected result, allowing the test to correctly verify the avg_over_time behavior without asserting a specific hint value that the function does not set. The counter reset hint comparison logic works as designed: if the expected histogram has UnknownCounterReset (the default when not specified), no comparison is performed. Only when a hint is explicitly specified in the test expectation will it be compared against the actual result. Fixes the test failure introduced by the counter reset hint comparison feature in promqltest. Signed-off-by: Aviral Garg Signed-off-by: aviralgarg05 --- promql/promqltest/testdata/native_histograms.test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/promql/promqltest/testdata/native_histograms.test b/promql/promqltest/testdata/native_histograms.test index fd4b1f4178..d66400f787 100644 --- a/promql/promqltest/testdata/native_histograms.test +++ b/promql/promqltest/testdata/native_histograms.test @@ -1283,7 +1283,7 @@ eval instant at 12m sum_over_time(nhcb_metric[13m]) eval instant at 12m avg_over_time(nhcb_metric[13m]) expect no_warn expect info msg: PromQL info: mismatched custom buckets were reconciled during aggregation - {} {{schema:-53 count:1 sum:1 custom_values:[5] counter_reset_hint:gauge buckets:[1]}} + {} {{schema:-53 count:1 sum:1 custom_values:[5] buckets:[1]}} eval instant at 12m last_over_time(nhcb_metric[13m]) expect no_warn From 119e75d78b8e2c98984b8bbec2eedf78a41533b9 Mon Sep 17 00:00:00 2001 From: aviralgarg05 Date: Fri, 19 Dec 2025 23:32:08 +0530 Subject: [PATCH 005/165] promqltest: Properly distinguish explicit counter_reset_hint specification This commit addresses the PR feedback for issue #17615. 
The previous implementation could not distinguish between: - No counter reset hint specified (meaning "don't care") - counter_reset_hint:unknown explicitly specified (meaning "verify it's unknown") Changes: - Added CounterResetHintSet field to parser.SequenceValue to track whether counter_reset_hint was explicitly specified in the test file - Modified buildHistogramFromMap to set this flag when the hint is present in the descriptor map - Updated newHistogramSequenceValue helper and histogramsSeries functions to propagate the flag through histogram series creation - Updated yacc grammar to use the new helper function - Modified compareNativeHistogram to accept the flag and only compare hints when explicitly specified This allows tests to: 1. Not specify a hint (no comparison, backward compatible) 2. Explicitly specify counter_reset_hint:unknown (verify it's unknown) 3. Explicitly specify counter_reset_hint:gauge/reset/not_reset (verify match) Fixes #17615 Signed-off-by: aviralgarg05 --- promql/parser/generated_parser.y | 5 ++-- promql/parser/generated_parser.y.go | 5 ++-- promql/parser/parse.go | 39 +++++++++++++++++++++++++---- promql/promqltest/test.go | 25 ++++++++++++------ 4 files changed, 58 insertions(+), 16 deletions(-) diff --git a/promql/parser/generated_parser.y b/promql/parser/generated_parser.y index d9bbb10b28..0f196ef5af 100644 --- a/promql/parser/generated_parser.y +++ b/promql/parser/generated_parser.y @@ -790,14 +790,15 @@ series_item : BLANK // Histogram descriptions (part of unit testing). | histogram_series_value { - $$ = []SequenceValue{{Histogram:$1}} + $$ = []SequenceValue{yylex.(*parser).newHistogramSequenceValue($1)} } | histogram_series_value TIMES uint { $$ = []SequenceValue{} // Add an additional value for time 0, which we ignore in tests. 
+ sv := yylex.(*parser).newHistogramSequenceValue($1) for i:=uint64(0); i <= $3; i++{ - $$ = append($$, SequenceValue{Histogram:$1}) + $$ = append($$, sv) //$1 += $2 } } diff --git a/promql/parser/generated_parser.y.go b/promql/parser/generated_parser.y.go index eb4b32129a..b649e86440 100644 --- a/promql/parser/generated_parser.y.go +++ b/promql/parser/generated_parser.y.go @@ -1835,15 +1835,16 @@ yydefault: case 158: yyDollar = yyS[yypt-1 : yypt+1] { - yyVAL.series = []SequenceValue{{Histogram: yyDollar[1].histogram}} + yyVAL.series = []SequenceValue{yylex.(*parser).newHistogramSequenceValue(yyDollar[1].histogram)} } case 159: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} // Add an additional value for time 0, which we ignore in tests. + sv := yylex.(*parser).newHistogramSequenceValue(yyDollar[1].histogram) for i := uint64(0); i <= yyDollar[3].uint; i++ { - yyVAL.series = append(yyVAL.series, SequenceValue{Histogram: yyDollar[1].histogram}) + yyVAL.series = append(yyVAL.series, sv) //$1 += $2 } } diff --git a/promql/parser/parse.go b/promql/parser/parse.go index bcd511f467..212a5758e7 100644 --- a/promql/parser/parse.go +++ b/promql/parser/parse.go @@ -67,6 +67,11 @@ type parser struct { generatedParserResult any parseErrors ParseErrors + + // lastHistogramCounterResetHintSet is set to true when the most recently + // built histogram had a counter_reset_hint explicitly specified. + // This is used to populate CounterResetHintSet in SequenceValue. + lastHistogramCounterResetHintSet bool } type Opt func(p *parser) @@ -234,6 +239,11 @@ type SequenceValue struct { Value float64 Omitted bool Histogram *histogram.FloatHistogram + // CounterResetHintSet is true if the counter reset hint was explicitly + // specified in the test file using counter_reset_hint:... syntax. + // This allows distinguishing between "no hint specified" (don't care) + // vs "counter_reset_hint:unknown" (verify it's unknown). 
+ CounterResetHintSet bool } func (v SequenceValue) String() string { @@ -496,25 +506,30 @@ func (p *parser) mergeMaps(left, right *map[string]any) (ret *map[string]any) { } func (p *parser) histogramsIncreaseSeries(base, inc *histogram.FloatHistogram, times uint64) ([]SequenceValue, error) { - return p.histogramsSeries(base, inc, times, func(a, b *histogram.FloatHistogram) (*histogram.FloatHistogram, error) { + // Capture the hint set flag immediately after inc histogram is built. + // The base histogram's hint set flag was already captured. + hintSet := p.lastHistogramCounterResetHintSet + return p.histogramsSeries(base, inc, times, hintSet, func(a, b *histogram.FloatHistogram) (*histogram.FloatHistogram, error) { res, _, _, err := a.Add(b) return res, err }) } func (p *parser) histogramsDecreaseSeries(base, inc *histogram.FloatHistogram, times uint64) ([]SequenceValue, error) { - return p.histogramsSeries(base, inc, times, func(a, b *histogram.FloatHistogram) (*histogram.FloatHistogram, error) { + // Capture the hint set flag immediately after inc histogram is built. + hintSet := p.lastHistogramCounterResetHintSet + return p.histogramsSeries(base, inc, times, hintSet, func(a, b *histogram.FloatHistogram) (*histogram.FloatHistogram, error) { res, _, _, err := a.Sub(b) return res, err }) } -func (*parser) histogramsSeries(base, inc *histogram.FloatHistogram, times uint64, +func (*parser) histogramsSeries(base, inc *histogram.FloatHistogram, times uint64, counterResetHintSet bool, combine func(*histogram.FloatHistogram, *histogram.FloatHistogram) (*histogram.FloatHistogram, error), ) ([]SequenceValue, error) { ret := make([]SequenceValue, times+1) // Add an additional value (the base) for time 0, which we ignore in tests. 
- ret[0] = SequenceValue{Histogram: base} + ret[0] = SequenceValue{Histogram: base, CounterResetHintSet: counterResetHintSet} cur := base for i := uint64(1); i <= times; i++ { if cur.Schema > inc.Schema { @@ -526,7 +541,7 @@ func (*parser) histogramsSeries(base, inc *histogram.FloatHistogram, times uint6 if err != nil { return ret, err } - ret[i] = SequenceValue{Histogram: cur} + ret[i] = SequenceValue{Histogram: cur, CounterResetHintSet: counterResetHintSet} } return ret, nil @@ -535,6 +550,8 @@ func (*parser) histogramsSeries(base, inc *histogram.FloatHistogram, times uint6 // buildHistogramFromMap is used in the grammar to take then individual parts of the histogram and complete it. func (p *parser) buildHistogramFromMap(desc *map[string]any) *histogram.FloatHistogram { output := &histogram.FloatHistogram{} + // Reset the flag for each new histogram being built. + p.lastHistogramCounterResetHintSet = false val, ok := (*desc)["schema"] if ok { @@ -595,6 +612,8 @@ func (p *parser) buildHistogramFromMap(desc *map[string]any) *histogram.FloatHis val, ok = (*desc)["counter_reset_hint"] if ok { + // Mark that the counter reset hint was explicitly specified. + p.lastHistogramCounterResetHintSet = true resetHint, ok := val.(Item) if ok { @@ -626,6 +645,16 @@ func (p *parser) buildHistogramFromMap(desc *map[string]any) *histogram.FloatHis return output } +// newHistogramSequenceValue creates a SequenceValue for a histogram, +// setting CounterResetHintSet based on whether counter_reset_hint was +// explicitly specified in the histogram description. 
+func (p *parser) newHistogramSequenceValue(h *histogram.FloatHistogram) SequenceValue { + return SequenceValue{ + Histogram: h, + CounterResetHintSet: p.lastHistogramCounterResetHintSet, + } +} + func (p *parser) buildHistogramBucketsAndSpans(desc *map[string]any, bucketsKey, offsetKey string, ) (buckets []float64, spans []histogram.Span) { bucketCount := 0 diff --git a/promql/promqltest/test.go b/promql/promqltest/test.go index d1702ba61b..0170236587 100644 --- a/promql/promqltest/test.go +++ b/promql/promqltest/test.go @@ -1009,7 +1009,12 @@ func (ev *evalCmd) compareResult(result parser.Value) error { exp := ev.expected[hash] var expectedFloats []promql.FPoint - var expectedHistograms []promql.HPoint + // expectedHPoint wraps HPoint with CounterResetHintSet flag from SequenceValue. + type expectedHPoint struct { + promql.HPoint + CounterResetHintSet bool + } + var expectedHistograms []expectedHPoint for i, e := range exp.vals { ts := ev.start.Add(time.Duration(i) * ev.step) @@ -1021,7 +1026,10 @@ func (ev *evalCmd) compareResult(result parser.Value) error { t := ts.UnixNano() / int64(time.Millisecond/time.Nanosecond) if e.Histogram != nil { - expectedHistograms = append(expectedHistograms, promql.HPoint{T: t, H: e.Histogram}) + expectedHistograms = append(expectedHistograms, expectedHPoint{ + HPoint: promql.HPoint{T: t, H: e.Histogram}, + CounterResetHintSet: e.CounterResetHintSet, + }) } else if !e.Omitted { expectedFloats = append(expectedFloats, promql.FPoint{T: t, F: e.Value}) } @@ -1050,7 +1058,7 @@ func (ev *evalCmd) compareResult(result parser.Value) error { return fmt.Errorf("expected histogram value at index %v for %s to have timestamp %v, but it had timestamp %v (result has %s)", i, ev.metrics[hash], expected.T, actual.T, formatSeriesResult(s)) } - if !compareNativeHistogram(expected.H.Compact(0), actual.H.Compact(0)) { + if !compareNativeHistogram(expected.H.Compact(0), actual.H.Compact(0), expected.CounterResetHintSet) { return fmt.Errorf("expected 
histogram value at index %v (t=%v) for %s to be %v, but got %v (result has %s)", i, actual.T, ev.metrics[hash], expected.H.TestExpression(), actual.H.TestExpression(), formatSeriesResult(s)) } } @@ -1089,7 +1097,7 @@ func (ev *evalCmd) compareResult(result parser.Value) error { if expH != nil && v.H == nil { return fmt.Errorf("expected histogram %s for %s but got float value %v", HistogramTestExpression(expH), v.Metric, v.F) } - if expH != nil && !compareNativeHistogram(expH.Compact(0), v.H.Compact(0)) { + if expH != nil && !compareNativeHistogram(expH.Compact(0), v.H.Compact(0), exp0.CounterResetHintSet) { return fmt.Errorf("expected %v for %s but got %s", HistogramTestExpression(expH), v.Metric, HistogramTestExpression(v.H)) } if !almost.Equal(exp0.Value, v.F, defaultEpsilon) { @@ -1127,7 +1135,9 @@ func (ev *evalCmd) compareResult(result parser.Value) error { // compareNativeHistogram is helper function to compare two native histograms // which can tolerate some differ in the field of float type, such as Count, Sum. -func compareNativeHistogram(exp, cur *histogram.FloatHistogram) bool { +// The counterResetHintSet parameter indicates whether the counter reset hint was +// explicitly specified in the expected histogram (from the test file). +func compareNativeHistogram(exp, cur *histogram.FloatHistogram, counterResetHintSet bool) bool { if exp == nil || cur == nil { return false } @@ -1164,8 +1174,9 @@ func compareNativeHistogram(exp, cur *histogram.FloatHistogram) bool { } // Compare CounterResetHint only if explicitly specified in expected histogram. - // UnknownCounterReset (the default) means "don't care about the hint". - if exp.CounterResetHint != histogram.UnknownCounterReset { + // When counterResetHintSet is false, no hint was specified, meaning "don't care". + // When counterResetHintSet is true, the hint was explicitly specified and must match. 
+ if counterResetHintSet { if exp.CounterResetHint != cur.CounterResetHint { return false } From f13283a5be5c60c672cebffea0cccf101425bda1 Mon Sep 17 00:00:00 2001 From: Chuanye Gao Date: Tue, 6 Jan 2026 19:58:23 +0800 Subject: [PATCH 006/165] web: fix ready endpoint stopping header and add test coverage Signed-off-by: Chuanye Gao --- web/web.go | 2 +- web/web_test.go | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/web/web.go b/web/web.go index afe78e4255..4df447be64 100644 --- a/web/web.go +++ b/web/web.go @@ -634,8 +634,8 @@ func (h *Handler) testReady(f http.HandlerFunc) http.HandlerFunc { case Ready: f(w, r) case NotReady: - w.WriteHeader(http.StatusServiceUnavailable) w.Header().Set("X-Prometheus-Stopping", "false") + w.WriteHeader(http.StatusServiceUnavailable) fmt.Fprintf(w, "Service Unavailable") case Stopping: w.Header().Set("X-Prometheus-Stopping", "true") diff --git a/web/web_test.go b/web/web_test.go index ae7d532f1f..ce682912a9 100644 --- a/web/web_test.go +++ b/web/web_test.go @@ -140,11 +140,32 @@ func TestReadyAndHealthy(t *testing.T) { resp, err = http.Get(u) require.NoError(t, err) require.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + require.Equal(t, "false", resp.Header.Get("X-Prometheus-Stopping")) cleanupTestResponse(t, resp) resp, err = http.Head(u) require.NoError(t, err) require.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + require.Equal(t, "false", resp.Header.Get("X-Prometheus-Stopping")) + cleanupTestResponse(t, resp) + } + + // Set to stopping + webHandler.SetReady(Stopping) + + for _, u := range []string{ + baseURL + "/-/ready", + } { + resp, err = http.Get(u) + require.NoError(t, err) + require.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + require.Equal(t, "true", resp.Header.Get("X-Prometheus-Stopping")) + cleanupTestResponse(t, resp) + + resp, err = http.Head(u) + require.NoError(t, err) + require.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) + 
require.Equal(t, "true", resp.Header.Get("X-Prometheus-Stopping")) cleanupTestResponse(t, resp) } From 9a56fecb753c8b9e43ac8d9776c3209bf22c5569 Mon Sep 17 00:00:00 2001 From: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> Date: Fri, 9 Jan 2026 17:52:38 +0100 Subject: [PATCH 007/165] scripts: use git ls-files and check go.work files in version check Update check-go-mod-version.sh to use git ls-files instead of find for better performance and to respect .gitignore. Also include go.work files in the version check to ensure consistency across workspace files and modules. Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> --- scripts/check-go-mod-version.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/check-go-mod-version.sh b/scripts/check-go-mod-version.sh index d651a62036..96317de2e6 100755 --- a/scripts/check-go-mod-version.sh +++ b/scripts/check-go-mod-version.sh @@ -1,12 +1,12 @@ #!/usr/bin/env bash -readarray -t mod_files < <(find . -type f -name go.mod) +readarray -t mod_files < <(git ls-files go.mod go.work '*/go.mod' || find . 
-type f -name go.mod -or -name go.work) echo "Checking files ${mod_files[@]}" matches=$(awk '$1 == "go" {print $2}' "${mod_files[@]}" | sort -u | wc -l) if [[ "${matches}" -ne 1 ]]; then - echo 'Not all go.mod files have matching go versions' + echo 'Not all go.mod/go.work files have matching go versions' exit 1 fi From f9242d470726dca15e326bd1093f0ae916a4bf9f Mon Sep 17 00:00:00 2001 From: Ritik Shukla Date: Sat, 10 Jan 2026 16:05:21 +0530 Subject: [PATCH 008/165] util: enhance test coverage for strutil package - Added comprehensive edge case tests for SanitizeLabelName (10 cases) - Added comprehensive edge case tests for SanitizeFullLabelName (15 cases) - Added more test cases for link generation functions (4 additional cases) - Fixed unicode test case: corrected expected underscores from 7 to 5 - Fixed digits test case: corrected expected output from '_____' to '_2345' - Converted tests to table-driven format with named subtests - Achieved 100% code coverage for the package Signed-off-by: Ritik Shukla --- util/strutil/strconv_test.go | 189 +++++++++++++++++++++++++++++++---- 1 file changed, 169 insertions(+), 20 deletions(-) diff --git a/util/strutil/strconv_test.go b/util/strutil/strconv_test.go index b4b87ee816..362fa79a6a 100644 --- a/util/strutil/strconv_test.go +++ b/util/strutil/strconv_test.go @@ -36,6 +36,26 @@ var linkTests = []linkTest{ "/graph?g0.expr=sum%28incoming_http_requests_total%7Bsystem%3D%22trackmetadata%22%7D%29&g0.tab=0", "/graph?g0.expr=sum%28incoming_http_requests_total%7Bsystem%3D%22trackmetadata%22%7D%29&g0.tab=1", }, + { + "up", + "/graph?g0.expr=up&g0.tab=0", + "/graph?g0.expr=up&g0.tab=1", + }, + { + "rate(http_requests_total[5m])", + "/graph?g0.expr=rate%28http_requests_total%5B5m%5D%29&g0.tab=0", + "/graph?g0.expr=rate%28http_requests_total%5B5m%5D%29&g0.tab=1", + }, + { + "", + "/graph?g0.expr=&g0.tab=0", + "/graph?g0.expr=&g0.tab=1", + }, + { + "metric_name{label=\"value with spaces\"}", + 
"/graph?g0.expr=metric_name%7Blabel%3D%22value+with+spaces%22%7D&g0.tab=0", + "/graph?g0.expr=metric_name%7Blabel%3D%22value+with+spaces%22%7D&g0.tab=1", + }, } func TestLink(t *testing.T) { @@ -51,29 +71,158 @@ func TestLink(t *testing.T) { } func TestSanitizeLabelName(t *testing.T) { - actual := SanitizeLabelName("fooClientLABEL") - expected := "fooClientLABEL" - require.Equal(t, expected, actual, "SanitizeLabelName failed for label (%s)", expected) + tests := []struct { + name string + input string + expected string + }{ + { + name: "valid label name", + input: "fooClientLABEL", + expected: "fooClientLABEL", + }, + { + name: "label with special characters", + input: "barClient.LABEL$$##", + expected: "barClient_LABEL____", + }, + { + name: "label starting with digit", + input: "123label", + expected: "123label", + }, + { + name: "label with dashes", + input: "my-label-name", + expected: "my_label_name", + }, + { + name: "label with spaces", + input: "my label name", + expected: "my_label_name", + }, + { + name: "label with mixed case and numbers", + input: "Test123Label456", + expected: "Test123Label456", + }, + { + name: "label with unicode characters", + input: "test-ñ-ü-label", + expected: "test_____label", + }, + { + name: "empty string", + input: "", + expected: "", + }, + { + name: "only underscores", + input: "___", + expected: "___", + }, + { + name: "label with colons", + input: "namespace:metric_name", + expected: "namespace_metric_name", + }, + } - actual = SanitizeLabelName("barClient.LABEL$$##") - expected = "barClient_LABEL____" - require.Equal(t, expected, actual, "SanitizeLabelName failed for label (%s)", expected) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := SanitizeLabelName(tt.input) + require.Equal(t, tt.expected, actual, "SanitizeLabelName(%q) = %q, want %q", tt.input, actual, tt.expected) + }) + } } func TestSanitizeFullLabelName(t *testing.T) { - actual := SanitizeFullLabelName("fooClientLABEL") - expected 
:= "fooClientLABEL" - require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label (%s)", expected) + tests := []struct { + name string + input string + expected string + }{ + { + name: "valid label name", + input: "fooClientLABEL", + expected: "fooClientLABEL", + }, + { + name: "label with special characters", + input: "barClient.LABEL$$##", + expected: "barClient_LABEL____", + }, + { + name: "label starting with digit", + input: "0zerothClient1LABEL", + expected: "_zerothClient1LABEL", + }, + { + name: "empty string", + input: "", + expected: "_", + }, + { + name: "label starting with multiple digits", + input: "123abc", + expected: "_23abc", + }, + { + name: "label with dashes", + input: "my-label-name", + expected: "my_label_name", + }, + { + name: "label with spaces", + input: "my label name", + expected: "my_label_name", + }, + { + name: "label with numbers in middle", + input: "Test123Label456", + expected: "Test123Label456", + }, + { + name: "single underscore", + input: "_", + expected: "_", + }, + { + name: "label starting with underscore", + input: "_validLabel", + expected: "_validLabel", + }, + { + name: "label with colons", + input: "namespace:metric_name", + expected: "namespace_metric_name", + }, + { + name: "label with unicode characters", + input: "test-ñ-ü-label", + expected: "test_____label", + }, + { + name: "only digits", + input: "12345", + expected: "_2345", + }, + { + name: "label with mixed invalid characters at start", + input: "!@#test", + expected: "___test", + }, + { + name: "label with consecutive digits at start", + input: "0123test", + expected: "_123test", + }, + } - actual = SanitizeFullLabelName("barClient.LABEL$$##") - expected = "barClient_LABEL____" - require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label (%s)", expected) - - actual = SanitizeFullLabelName("0zerothClient1LABEL") - expected = "_zerothClient1LABEL" - require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label 
(%s)", expected) - - actual = SanitizeFullLabelName("") - expected = "_" - require.Equal(t, expected, actual, "SanitizeFullLabelName failed for the empty label") + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := SanitizeFullLabelName(tt.input) + require.Equal(t, tt.expected, actual, "SanitizeFullLabelName(%q) = %q, want %q", tt.input, actual, tt.expected) + }) + } } From 035952bc8b34661b17a4889afe9437dc8cf97887 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EB=AF=BC=EC=98=81?= Date: Sun, 11 Jan 2026 00:29:23 +0900 Subject: [PATCH 009/165] refactor(ui): Remove explicit any from globals MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 김민영 --- web/ui/react-app/src/globals.ts | 7 +++---- web/ui/react-app/src/types/index.d.ts | 5 +++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/web/ui/react-app/src/globals.ts b/web/ui/react-app/src/globals.ts index d2a5f1d50a..7a59bdbffd 100644 --- a/web/ui/react-app/src/globals.ts +++ b/web/ui/react-app/src/globals.ts @@ -1,6 +1,5 @@ import jquery from 'jquery'; +import moment from 'moment'; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -(window as any).jQuery = jquery; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -(window as any).moment = require('moment'); +window.jQuery = jquery; +window.moment = moment; diff --git a/web/ui/react-app/src/types/index.d.ts b/web/ui/react-app/src/types/index.d.ts index addf1cc702..9cf8fbd7cc 100644 --- a/web/ui/react-app/src/types/index.d.ts +++ b/web/ui/react-app/src/types/index.d.ts @@ -68,3 +68,8 @@ interface JQueryStatic { scale: () => Color; }; } + +interface Window { + jQuery: JQueryStatic; + moment: typeof import('moment'); +} From 5499260964b94e4a396f0ce2327388b174cb38f8 Mon Sep 17 00:00:00 2001 From: Rahulrairai59 Date: Tue, 13 Jan 2026 21:25:56 -0600 Subject: [PATCH 010/165] Update react-router version to v7.12.0 to fix CVE-2026-21884 in 
package-lock.json To fix CVE-2026-21884 HIGH severity vulnerability Signed-off-by: Rahulrairai59 --- web/ui/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/web/ui/package-lock.json b/web/ui/package-lock.json index 764fd87820..a1f72ff228 100644 --- a/web/ui/package-lock.json +++ b/web/ui/package-lock.json @@ -7823,9 +7823,9 @@ } }, "node_modules/react-router": { - "version": "7.9.5", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.9.5.tgz", - "integrity": "sha512-JmxqrnBZ6E9hWmf02jzNn9Jm3UqyeimyiwzD69NjxGySG6lIz/1LVPsoTCwN7NBX2XjCEa1LIX5EMz1j2b6u6A==", + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.12.0.tgz", + "integrity": "sha512-kTPDYPFzDVGIIGNLS5VJykK0HfHLY5MF3b+xj0/tTyNYL1gF1qs7u67Z9jEhQk2sQ98SUaHxlG31g1JtF7IfVw==", "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" From 70bc06718dbc4b0e7f588e9c6e8363c6c41da85f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Mon, 12 Jan 2026 13:14:54 +0100 Subject: [PATCH 011/165] feat(tsdb): new AppenderV2 and AtST interface for chunks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit No implementation yet. Just to test the shape of the interface. AtST is implemented for trivial cases, anything else is hard coded to return 0. 
Ref: https://github.com/prometheus/prometheus/issues/17791 Signed-off-by: György Krajcsovits --- promql/engine_test.go | 10 ++-- promql/histogram_stats_iterator_test.go | 2 + promql/value.go | 5 ++ rules/alerting_test.go | 2 + storage/buffer.go | 15 ++++++ storage/buffer_test.go | 8 +++ storage/interface.go | 23 ++++---- storage/interface_test.go | 19 ++++++- storage/merge.go | 7 +++ storage/merge_test.go | 4 ++ storage/remote/codec.go | 11 ++++ storage/remote/codec_test.go | 2 +- storage/series.go | 22 +++++--- tsdb/chunkenc/chunk.go | 51 ++++++++++++------ tsdb/chunkenc/chunk_test.go | 6 +-- tsdb/chunkenc/float_histogram.go | 10 ++-- tsdb/chunkenc/float_histogram_test.go | 58 ++++++++++---------- tsdb/chunkenc/histogram.go | 10 ++-- tsdb/chunkenc/histogram_test.go | 70 ++++++++++++------------- tsdb/chunkenc/xor.go | 10 ++-- tsdb/chunkenc/xor_test.go | 2 +- tsdb/chunks/chunks.go | 7 +-- tsdb/chunks/head_chunks_test.go | 2 +- tsdb/chunks/samples.go | 13 +++-- tsdb/head.go | 5 +- tsdb/head_append.go | 9 ++-- tsdb/ooo_head.go | 9 ++-- tsdb/querier.go | 34 +++++++++--- tsdb/querier_test.go | 14 +++-- 29 files changed, 295 insertions(+), 145 deletions(-) diff --git a/promql/engine_test.go b/promql/engine_test.go index 7b7a67a54b..0eff93af4c 100644 --- a/promql/engine_test.go +++ b/promql/engine_test.go @@ -3747,12 +3747,12 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) { recoded bool ) - newc, recoded, app, err = app.AppendHistogram(nil, 0, h1.Copy(), false) + newc, recoded, app, err = app.AppendHistogram(nil, 0, 0, h1.Copy(), false) require.NoError(t, err) require.False(t, recoded) require.Nil(t, newc) - newc, recoded, _, err = app.AppendHistogram(nil, 10, h1.Copy(), false) + newc, recoded, _, err = app.AppendHistogram(nil, 0, 10, h1.Copy(), false) require.NoError(t, err) require.False(t, recoded) require.Nil(t, newc) @@ -3762,7 +3762,7 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) { app, err = c2.Appender() require.NoError(t, err) - 
app.Append(20, math.Float64frombits(value.StaleNaN)) + app.Append(0, 20, math.Float64frombits(value.StaleNaN)) // Make a chunk with two normal histograms that have zero value. h2 := histogram.Histogram{ @@ -3773,12 +3773,12 @@ func TestHistogramRateWithFloatStaleness(t *testing.T) { app, err = c3.Appender() require.NoError(t, err) - newc, recoded, app, err = app.AppendHistogram(nil, 30, h2.Copy(), false) + newc, recoded, app, err = app.AppendHistogram(nil, 0, 30, h2.Copy(), false) require.NoError(t, err) require.False(t, recoded) require.Nil(t, newc) - newc, recoded, _, err = app.AppendHistogram(nil, 40, h2.Copy(), false) + newc, recoded, _, err = app.AppendHistogram(nil, 0, 40, h2.Copy(), false) require.NoError(t, err) require.False(t, recoded) require.Nil(t, newc) diff --git a/promql/histogram_stats_iterator_test.go b/promql/histogram_stats_iterator_test.go index cfea8a568e..d3a76820da 100644 --- a/promql/histogram_stats_iterator_test.go +++ b/promql/histogram_stats_iterator_test.go @@ -235,4 +235,6 @@ func (h *histogramIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, func (*histogramIterator) AtT() int64 { return 0 } +func (*histogramIterator) AtST() int64 { return 0 } + func (*histogramIterator) Err() error { return nil } diff --git a/promql/value.go b/promql/value.go index 02cb021024..17afdfc410 100644 --- a/promql/value.go +++ b/promql/value.go @@ -487,6 +487,11 @@ func (ssi *storageSeriesIterator) AtT() int64 { return ssi.currT } +// TODO(krajorama): implement AtST. 
+func (*storageSeriesIterator) AtST() int64 { + return 0 +} + func (ssi *storageSeriesIterator) Next() chunkenc.ValueType { if ssi.currH != nil { ssi.iHistograms++ diff --git a/rules/alerting_test.go b/rules/alerting_test.go index a2c7abcd56..caf32e6472 100644 --- a/rules/alerting_test.go +++ b/rules/alerting_test.go @@ -697,12 +697,14 @@ func TestQueryForStateSeries(t *testing.T) { { selectMockFunction: func(bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet { return storage.TestSeriesSet(storage.MockSeries( + nil, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "ALERTS_FOR_STATE", "alertname", "TestRule", "severity", "critical"}, )) }, expectedSeries: storage.MockSeries( + nil, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "ALERTS_FOR_STATE", "alertname", "TestRule", "severity", "critical"}, diff --git a/storage/buffer.go b/storage/buffer.go index 223c4fa42b..c6a32821d8 100644 --- a/storage/buffer.go +++ b/storage/buffer.go @@ -171,6 +171,11 @@ func (s fSample) T() int64 { return s.t } +// TODO(krajorama): implement ST. +func (fSample) ST() int64 { + return 0 +} + func (s fSample) F() float64 { return s.f } @@ -200,6 +205,11 @@ func (s hSample) T() int64 { return s.t } +// TODO(krajorama): implement ST. +func (hSample) ST() int64 { + return 0 +} + func (hSample) F() float64 { panic("F() called for hSample") } @@ -229,6 +239,11 @@ func (s fhSample) T() int64 { return s.t } +// TODO(krajorama): implement ST. +func (fhSample) ST() int64 { + return 0 +} + func (fhSample) F() float64 { panic("F() called for fhSample") } diff --git a/storage/buffer_test.go b/storage/buffer_test.go index fc6603d4a5..beb9d8e71c 100644 --- a/storage/buffer_test.go +++ b/storage/buffer_test.go @@ -402,6 +402,10 @@ func (*mockSeriesIterator) AtT() int64 { return 0 // Not really mocked. } +func (*mockSeriesIterator) AtST() int64 { + return 0 // Not really mocked. 
+} + type fakeSeriesIterator struct { nsamples int64 step int64 @@ -428,6 +432,10 @@ func (it *fakeSeriesIterator) AtT() int64 { return it.idx * it.step } +func (*fakeSeriesIterator) AtST() int64 { + return 0 // No start timestamps in this fake iterator. +} + func (it *fakeSeriesIterator) Next() chunkenc.ValueType { it.idx++ if it.idx >= it.nsamples { diff --git a/storage/interface.go b/storage/interface.go index 23b8b48a0c..a75ac3f58d 100644 --- a/storage/interface.go +++ b/storage/interface.go @@ -473,9 +473,10 @@ type Series interface { } type mockSeries struct { - timestamps []int64 - values []float64 - labelSet []string + startTimestamps []int64 + timestamps []int64 + values []float64 + labelSet []string } func (s mockSeries) Labels() labels.Labels { @@ -483,15 +484,19 @@ func (s mockSeries) Labels() labels.Labels { } func (s mockSeries) Iterator(chunkenc.Iterator) chunkenc.Iterator { - return chunkenc.MockSeriesIterator(s.timestamps, s.values) + return chunkenc.MockSeriesIterator(s.startTimestamps, s.timestamps, s.values) } -// MockSeries returns a series with custom timestamps, values and labelSet. -func MockSeries(timestamps []int64, values []float64, labelSet []string) Series { +// MockSeries returns a series with custom start timestamp, timestamps, values, +// and labelSet. +// Start timestamps is optional, pass nil or empty slice to indicate no start +// timestamps. 
+func MockSeries(startTimestamps, timestamps []int64, values []float64, labelSet []string) Series { return mockSeries{ - timestamps: timestamps, - values: values, - labelSet: labelSet, + startTimestamps: startTimestamps, + timestamps: timestamps, + values: values, + labelSet: labelSet, } } diff --git a/storage/interface_test.go b/storage/interface_test.go index d28e5177e3..3ea4b757e7 100644 --- a/storage/interface_test.go +++ b/storage/interface_test.go @@ -23,7 +23,7 @@ import ( ) func TestMockSeries(t *testing.T) { - s := storage.MockSeries([]int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"}) + s := storage.MockSeries(nil, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"}) it := s.Iterator(nil) ts := []int64{} vs := []float64{} @@ -35,3 +35,20 @@ func TestMockSeries(t *testing.T) { require.Equal(t, []int64{1, 2, 3}, ts) require.Equal(t, []float64{1, 2, 3}, vs) } + +func TestMockSeriesWithST(t *testing.T) { + s := storage.MockSeries([]int64{0, 1, 2}, []int64{1, 2, 3}, []float64{1, 2, 3}, []string{"__name__", "foo"}) + it := s.Iterator(nil) + ts := []int64{} + vs := []float64{} + st := []int64{} + for it.Next() == chunkenc.ValFloat { + t, v := it.At() + ts = append(ts, t) + vs = append(vs, v) + st = append(st, it.AtST()) + } + require.Equal(t, []int64{1, 2, 3}, ts) + require.Equal(t, []float64{1, 2, 3}, vs) + require.Equal(t, []int64{0, 1, 2}, st) +} diff --git a/storage/merge.go b/storage/merge.go index 12d6d3ac0d..76bf0994e0 100644 --- a/storage/merge.go +++ b/storage/merge.go @@ -599,6 +599,13 @@ func (c *chainSampleIterator) AtT() int64 { return c.curr.AtT() } +func (c *chainSampleIterator) AtST() int64 { + if c.curr == nil { + panic("chainSampleIterator.AtST called before first .Next or after .Next returned false.") + } + return c.curr.AtST() +} + func (c *chainSampleIterator) Next() chunkenc.ValueType { var ( currT int64 diff --git a/storage/merge_test.go b/storage/merge_test.go index 6e2daaeb3a..0060950d6f 100644 --- 
a/storage/merge_test.go +++ b/storage/merge_test.go @@ -1716,6 +1716,10 @@ func (errIterator) AtT() int64 { return 0 } +func (errIterator) AtST() int64 { + return 0 +} + func (e errIterator) Err() error { return e.err } diff --git a/storage/remote/codec.go b/storage/remote/codec.go index 9f0fb7d92a..c689a51164 100644 --- a/storage/remote/codec.go +++ b/storage/remote/codec.go @@ -564,6 +564,12 @@ func (c *concreteSeriesIterator) AtT() int64 { return c.series.floats[c.floatsCur].Timestamp } +// TODO(krajorama): implement AtST. Maybe. concreteSeriesIterator is used +// for turning query results into an iterable, but query results do not have ST. +func (*concreteSeriesIterator) AtST() int64 { + return 0 +} + const noTS = int64(math.MaxInt64) // Next implements chunkenc.Iterator. @@ -832,6 +838,11 @@ func (it *chunkedSeriesIterator) AtT() int64 { return it.cur.AtT() } +// TODO(krajorama): test AtST once we have a chunk format that provides ST. +func (it *chunkedSeriesIterator) AtST() int64 { + return it.cur.AtST() +} + func (it *chunkedSeriesIterator) Err() error { return it.err } diff --git a/storage/remote/codec_test.go b/storage/remote/codec_test.go index e6e7813c7b..5da8c8176c 100644 --- a/storage/remote/codec_test.go +++ b/storage/remote/codec_test.go @@ -1146,7 +1146,7 @@ func buildTestChunks(t *testing.T) []prompb.Chunk { minTimeMs := time for j := range numSamplesPerTestChunk { - a.Append(time, float64(i+j)) + a.Append(0, time, float64(i+j)) time += int64(1000) } diff --git a/storage/series.go b/storage/series.go index 7e130d494d..d114438078 100644 --- a/storage/series.go +++ b/storage/series.go @@ -138,6 +138,11 @@ func (it *listSeriesIterator) AtT() int64 { return s.T() } +func (it *listSeriesIterator) AtST() int64 { + s := it.samples.Get(it.idx) + return s.ST() +} + func (it *listSeriesIterator) Next() chunkenc.ValueType { it.idx++ if it.idx >= it.samples.Len() { @@ -355,18 +360,20 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) 
chunks.Iterator { lastType = typ var ( - t int64 - v float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + st, t int64 + v float64 + h *histogram.Histogram + fh *histogram.FloatHistogram ) switch typ { case chunkenc.ValFloat: t, v = seriesIter.At() - app.Append(t, v) + st = seriesIter.AtST() + app.Append(st, t, v) case chunkenc.ValHistogram: t, h = seriesIter.AtHistogram(nil) - newChk, recoded, app, err = app.AppendHistogram(nil, t, h, false) + st = seriesIter.AtST() + newChk, recoded, app, err = app.AppendHistogram(nil, st, t, h, false) if err != nil { return errChunksIterator{err: err} } @@ -381,7 +388,8 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator { } case chunkenc.ValFloatHistogram: t, fh = seriesIter.AtFloatHistogram(nil) - newChk, recoded, app, err = app.AppendFloatHistogram(nil, t, fh, false) + st = seriesIter.AtST() + newChk, recoded, app, err = app.AppendFloatHistogram(nil, st, t, fh, false) if err != nil { return errChunksIterator{err: err} } diff --git a/tsdb/chunkenc/chunk.go b/tsdb/chunkenc/chunk.go index fed28c5701..d5e028e681 100644 --- a/tsdb/chunkenc/chunk.go +++ b/tsdb/chunkenc/chunk.go @@ -99,9 +99,9 @@ type Iterable interface { Iterator(Iterator) Iterator } -// Appender adds sample pairs to a chunk. +// Appender adds sample with start timestamp, timestamp, and value to a chunk. type Appender interface { - Append(int64, float64) + Append(st, t int64, v float64) // AppendHistogram and AppendFloatHistogram append a histogram sample to a histogram or float histogram chunk. // Appending a histogram may require creating a completely new chunk or recoding (changing) the current chunk. @@ -114,8 +114,8 @@ type Appender interface { // The returned bool isRecoded can be used to distinguish between the new Chunk c being a completely new Chunk // or the current Chunk recoded to a new Chunk. // The Appender app that can be used for the next append is always returned. 
- AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) - AppendFloatHistogram(prev *FloatHistogramAppender, t int64, h *histogram.FloatHistogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) + AppendHistogram(prev *HistogramAppender, st, t int64, h *histogram.Histogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) + AppendFloatHistogram(prev *FloatHistogramAppender, st, t int64, h *histogram.FloatHistogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) } // Iterator is a simple iterator that can only get the next value. @@ -151,6 +151,10 @@ type Iterator interface { // AtT returns the current timestamp. // Before the iterator has advanced, the behaviour is unspecified. AtT() int64 + // AtST returns the current start timestamp. + // Return 0 if the start timestamp is not implemented or not set. + // Before the iterator has advanced, the behaviour is unspecified. + AtST() int64 // Err returns the current error. It should be used only after the // iterator is exhausted, i.e. `Next` or `Seek` have returned ValNone. Err() error @@ -208,25 +212,30 @@ func (v ValueType) NewChunk() (Chunk, error) { } } -// MockSeriesIterator returns an iterator for a mock series with custom timeStamps and values. -func MockSeriesIterator(timestamps []int64, values []float64) Iterator { +// MockSeriesIterator returns an iterator for a mock series with custom +// start timestamp, timestamps, and values. +// Start timestamps is optional, pass nil or empty slice to indicate no start +// timestamps. 
+func MockSeriesIterator(startTimestamps, timestamps []int64, values []float64) Iterator { return &mockSeriesIterator{ - timeStamps: timestamps, - values: values, - currIndex: -1, + startTimestamps: startTimestamps, + timestamps: timestamps, + values: values, + currIndex: -1, } } type mockSeriesIterator struct { - timeStamps []int64 - values []float64 - currIndex int + timestamps []int64 + startTimestamps []int64 + values []float64 + currIndex int } func (*mockSeriesIterator) Seek(int64) ValueType { return ValNone } func (it *mockSeriesIterator) At() (int64, float64) { - return it.timeStamps[it.currIndex], it.values[it.currIndex] + return it.timestamps[it.currIndex], it.values[it.currIndex] } func (*mockSeriesIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) { @@ -238,11 +247,18 @@ func (*mockSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, * } func (it *mockSeriesIterator) AtT() int64 { - return it.timeStamps[it.currIndex] + return it.timestamps[it.currIndex] +} + +func (it *mockSeriesIterator) AtST() int64 { + if len(it.startTimestamps) == 0 { + return 0 + } + return it.startTimestamps[it.currIndex] } func (it *mockSeriesIterator) Next() ValueType { - if it.currIndex < len(it.timeStamps)-1 { + if it.currIndex < len(it.timestamps)-1 { it.currIndex++ return ValFloat } @@ -268,8 +284,9 @@ func (nopIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogra func (nopIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) { return math.MinInt64, nil } -func (nopIterator) AtT() int64 { return math.MinInt64 } -func (nopIterator) Err() error { return nil } +func (nopIterator) AtT() int64 { return math.MinInt64 } +func (nopIterator) AtST() int64 { return 0 } +func (nopIterator) Err() error { return nil } // Pool is used to create and reuse chunk references to avoid allocations. 
type Pool interface { diff --git a/tsdb/chunkenc/chunk_test.go b/tsdb/chunkenc/chunk_test.go index d2d0e4c053..41bb23ddd1 100644 --- a/tsdb/chunkenc/chunk_test.go +++ b/tsdb/chunkenc/chunk_test.go @@ -65,7 +65,7 @@ func testChunk(t *testing.T, c Chunk) { require.NoError(t, err) } - app.Append(ts, v) + app.Append(0, ts, v) exp = append(exp, pair{t: ts, v: v}) } @@ -226,7 +226,7 @@ func benchmarkIterator(b *testing.B, newChunk func() Chunk) { if j > 250 { break } - a.Append(p.t, p.v) + a.Append(0, p.t, p.v) j++ } } @@ -303,7 +303,7 @@ func benchmarkAppender(b *testing.B, deltas func() (int64, float64), newChunk fu b.Fatalf("get appender: %s", err) } for _, p := range exp { - a.Append(p.t, p.v) + a.Append(0, p.t, p.v) } } } diff --git a/tsdb/chunkenc/float_histogram.go b/tsdb/chunkenc/float_histogram.go index 797bc596b5..6af2fa68e2 100644 --- a/tsdb/chunkenc/float_histogram.go +++ b/tsdb/chunkenc/float_histogram.go @@ -195,7 +195,7 @@ func (a *FloatHistogramAppender) NumSamples() int { // Append implements Appender. This implementation panics because normal float // samples must never be appended to a histogram chunk. 
-func (*FloatHistogramAppender) Append(int64, float64) { +func (*FloatHistogramAppender) Append(int64, int64, float64) { panic("appended a float sample to a histogram chunk") } @@ -682,11 +682,11 @@ func (*FloatHistogramAppender) recodeHistogram( } } -func (*FloatHistogramAppender) AppendHistogram(*HistogramAppender, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) { +func (*FloatHistogramAppender) AppendHistogram(*HistogramAppender, int64, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) { panic("appended a histogram sample to a float histogram chunk") } -func (a *FloatHistogramAppender) AppendFloatHistogram(prev *FloatHistogramAppender, t int64, h *histogram.FloatHistogram, appendOnly bool) (Chunk, bool, Appender, error) { +func (a *FloatHistogramAppender) AppendFloatHistogram(prev *FloatHistogramAppender, _, t int64, h *histogram.FloatHistogram, appendOnly bool) (Chunk, bool, Appender, error) { if a.NumSamples() == 0 { a.appendFloatHistogram(t, h) if h.CounterResetHint == histogram.GaugeType { @@ -938,6 +938,10 @@ func (it *floatHistogramIterator) AtT() int64 { return it.t } +func (*floatHistogramIterator) AtST() int64 { + return 0 +} + func (it *floatHistogramIterator) Err() error { return it.err } diff --git a/tsdb/chunkenc/float_histogram_test.go b/tsdb/chunkenc/float_histogram_test.go index f27de97516..cbeb3171ce 100644 --- a/tsdb/chunkenc/float_histogram_test.go +++ b/tsdb/chunkenc/float_histogram_test.go @@ -63,7 +63,7 @@ func TestFirstFloatHistogramExplicitCounterReset(t *testing.T) { chk := NewFloatHistogramChunk() app, err := chk.Appender() require.NoError(t, err) - newChk, recoded, newApp, err := app.AppendFloatHistogram(nil, 0, h, false) + newChk, recoded, newApp, err := app.AppendFloatHistogram(nil, 0, 0, h, false) require.NoError(t, err) require.Nil(t, newChk) require.False(t, recoded) @@ -101,7 +101,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) { }, NegativeBuckets: []int64{2, 1, -1, -1}, // counts: 2, 
3, 2, 1 (total 8) } - chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false) + chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false) require.NoError(t, err) require.Nil(t, chk) exp = append(exp, floatResult{t: ts, h: h.ToFloat(nil)}) @@ -115,7 +115,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) { h.Sum = 24.4 h.PositiveBuckets = []int64{5, -2, 1, -2} // counts: 5, 3, 4, 2 (total 14) h.NegativeBuckets = []int64{4, -1, 1, -1} // counts: 4, 3, 4, 4 (total 15) - chk, _, _, err = app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false) + chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false) require.NoError(t, err) require.Nil(t, chk) expH := h.ToFloat(nil) @@ -134,7 +134,7 @@ func TestFloatHistogramChunkSameBuckets(t *testing.T) { h.Sum = 24.4 h.PositiveBuckets = []int64{6, 1, -3, 6} // counts: 6, 7, 4, 10 (total 27) h.NegativeBuckets = []int64{5, 1, -2, 3} // counts: 5, 6, 4, 7 (total 22) - chk, _, _, err = app.AppendFloatHistogram(nil, ts, h.ToFloat(nil), false) + chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts, h.ToFloat(nil), false) require.NoError(t, err) require.Nil(t, chk) expH = h.ToFloat(nil) @@ -224,7 +224,7 @@ func TestFloatHistogramChunkBucketChanges(t *testing.T) { NegativeBuckets: []int64{1}, } - chk, _, app, err := app.AppendFloatHistogram(nil, ts1, h1.ToFloat(nil), false) + chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts1, h1.ToFloat(nil), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -260,7 +260,7 @@ func TestFloatHistogramChunkBucketChanges(t *testing.T) { require.True(t, ok) // Only new buckets came in. 
require.False(t, cr) c, app = hApp.recode(posInterjections, negInterjections, h2.PositiveSpans, h2.NegativeSpans) - chk, _, _, err = app.AppendFloatHistogram(nil, ts2, h2.ToFloat(nil), false) + chk, _, _, err = app.AppendFloatHistogram(nil, 0, ts2, h2.ToFloat(nil), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 2, c.NumSamples()) @@ -330,7 +330,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) { ts := int64(1234567890) - chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.Copy(), false) + chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.Copy(), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -557,7 +557,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) { nextChunk := NewFloatHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -575,7 +575,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) { nextChunk := NewFloatHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -602,7 +602,7 @@ func TestFloatHistogramChunkAppendable(t *testing.T) { nextChunk := NewFloatHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendFloatHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -717,7 +717,7 @@ func TestFloatHistogramChunkAppendable(t 
*testing.T) { func assertNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader, expectHint histogram.CounterResetHint) { oldChunkBytes := oldChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false) require.Equal(t, oldChunkBytes, oldChunk.Bytes()) // Sanity check that previous chunk is untouched. require.NoError(t, err) require.NotNil(t, newChunk) @@ -732,7 +732,7 @@ func assertNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *Fl func assertNoNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader) { oldChunkBytes := oldChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false) require.Greater(t, len(oldChunk.Bytes()), len(oldChunkBytes)) // Check that current chunk is bigger than previously. require.NoError(t, err) require.Nil(t, newChunk) @@ -745,7 +745,7 @@ func assertNoNewFloatHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp * func assertRecodedFloatHistogramChunkOnAppend(t *testing.T, prevChunk Chunk, hApp *FloatHistogramAppender, ts int64, h *histogram.FloatHistogram, expectHeader CounterResetHeader) { prevChunkBytes := prevChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendFloatHistogram(nil, 0, ts, h, false) require.Equal(t, prevChunkBytes, prevChunk.Bytes()) // Sanity check that previous chunk is untouched. This may change in the future if we implement in-place recoding. 
require.NoError(t, err) require.NotNil(t, newChunk) @@ -959,7 +959,7 @@ func TestFloatHistogramChunkAppendableWithEmptySpan(t *testing.T) { require.NoError(t, err) require.Equal(t, 0, c.NumSamples()) - _, _, _, err = app.AppendFloatHistogram(nil, 1, tc.h1, true) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, tc.h1, true) require.NoError(t, err) require.Equal(t, 1, c.NumSamples()) hApp, _ := app.(*FloatHistogramAppender) @@ -1019,7 +1019,7 @@ func TestFloatHistogramChunkAppendableGauge(t *testing.T) { ts := int64(1234567890) - chk, _, app, err := app.AppendFloatHistogram(nil, ts, h.Copy(), false) + chk, _, app, err := app.AppendFloatHistogram(nil, 0, ts, h.Copy(), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -1259,7 +1259,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestFloatHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1267,7 +1267,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. h2 := h.Copy() h2.Schema++ - c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "float histogram schema change") @@ -1281,7 +1281,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestFloatHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1289,7 +1289,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. 
h2 := h.Copy() h2.CounterResetHint = histogram.CounterReset - c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "float histogram counter reset") @@ -1303,7 +1303,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestCustomBucketsFloatHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendFloatHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendFloatHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1311,7 +1311,7 @@ func TestFloatHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. h2 := h.Copy() h2.CustomValues = []float64{0, 1, 2, 3, 4, 5, 6, 7} - c, isRecoded, _, err = app.AppendFloatHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendFloatHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "float histogram counter reset") @@ -1344,10 +1344,10 @@ func TestFloatHistogramUniqueSpansAfterNext(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. 
@@ -1390,10 +1390,10 @@ func TestFloatHistogramUniqueCustomValuesAfterNext(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. @@ -1435,7 +1435,7 @@ func TestFloatHistogramEmptyBucketsWithGaps(t *testing.T) { c := NewFloatHistogramChunk() app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 1, h1, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h1, false) require.NoError(t, err) h2 := &histogram.FloatHistogram{ @@ -1448,7 +1448,7 @@ func TestFloatHistogramEmptyBucketsWithGaps(t *testing.T) { } require.NoError(t, h2.Validate()) - newC, recoded, _, err := app.AppendFloatHistogram(nil, 2, h2, false) + newC, recoded, _, err := app.AppendFloatHistogram(nil, 0, 2, h2, false) require.NoError(t, err) require.True(t, recoded) require.NotNil(t, newC) @@ -1483,7 +1483,7 @@ func TestFloatHistogramIteratorFailIfSchemaInValid(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 1, h, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h, false) require.NoError(t, err) it := c.Iterator(nil) @@ -1512,7 +1512,7 @@ func TestFloatHistogramIteratorReduceSchema(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendFloatHistogram(nil, 1, h, false) + _, _, _, err = app.AppendFloatHistogram(nil, 0, 1, h, false) require.NoError(t, err) it := c.Iterator(nil) diff --git a/tsdb/chunkenc/histogram.go b/tsdb/chunkenc/histogram.go index e05c49c81d..4e77f387d3 100644 --- a/tsdb/chunkenc/histogram.go +++ b/tsdb/chunkenc/histogram.go @@ -219,7 +219,7 @@ func (a 
*HistogramAppender) NumSamples() int { // Append implements Appender. This implementation panics because normal float // samples must never be appended to a histogram chunk. -func (*HistogramAppender) Append(int64, float64) { +func (*HistogramAppender) Append(int64, int64, float64) { panic("appended a float sample to a histogram chunk") } @@ -734,11 +734,11 @@ func (a *HistogramAppender) writeSumDelta(v float64) { xorWrite(a.b, v, a.sum, &a.leading, &a.trailing) } -func (*HistogramAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) { +func (*HistogramAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) { panic("appended a float histogram sample to a histogram chunk") } -func (a *HistogramAppender) AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (Chunk, bool, Appender, error) { +func (a *HistogramAppender) AppendHistogram(prev *HistogramAppender, _, t int64, h *histogram.Histogram, appendOnly bool) (Chunk, bool, Appender, error) { if a.NumSamples() == 0 { a.appendHistogram(t, h) if h.CounterResetHint == histogram.GaugeType { @@ -1075,6 +1075,10 @@ func (it *histogramIterator) AtT() int64 { return it.t } +func (*histogramIterator) AtST() int64 { + return 0 +} + func (it *histogramIterator) Err() error { return it.err } diff --git a/tsdb/chunkenc/histogram_test.go b/tsdb/chunkenc/histogram_test.go index 38bbd58465..6ac8500e64 100644 --- a/tsdb/chunkenc/histogram_test.go +++ b/tsdb/chunkenc/histogram_test.go @@ -64,7 +64,7 @@ func TestFirstHistogramExplicitCounterReset(t *testing.T) { chk := NewHistogramChunk() app, err := chk.Appender() require.NoError(t, err) - newChk, recoded, newApp, err := app.AppendHistogram(nil, 0, h, false) + newChk, recoded, newApp, err := app.AppendHistogram(nil, 0, 0, h, false) require.NoError(t, err) require.Nil(t, newChk) require.False(t, 
recoded) @@ -102,7 +102,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) { }, NegativeBuckets: []int64{2, 1, -1, -1}, // counts: 2, 3, 2, 1 (total 8) } - chk, _, app, err := app.AppendHistogram(nil, ts, h, false) + chk, _, app, err := app.AppendHistogram(nil, 0, ts, h, false) require.NoError(t, err) require.Nil(t, chk) exp = append(exp, result{t: ts, h: h, fh: h.ToFloat(nil)}) @@ -116,7 +116,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) { h.Sum = 24.4 h.PositiveBuckets = []int64{5, -2, 1, -2} // counts: 5, 3, 4, 2 (total 14) h.NegativeBuckets = []int64{4, -1, 1, -1} // counts: 4, 3, 4, 4 (total 15) - chk, _, _, err = app.AppendHistogram(nil, ts, h, false) + chk, _, _, err = app.AppendHistogram(nil, 0, ts, h, false) require.NoError(t, err) require.Nil(t, chk) hExp := h.Copy() @@ -135,7 +135,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) { h.Sum = 24.4 h.PositiveBuckets = []int64{6, 1, -3, 6} // counts: 6, 7, 4, 10 (total 27) h.NegativeBuckets = []int64{5, 1, -2, 3} // counts: 5, 6, 4, 7 (total 22) - chk, _, _, err = app.AppendHistogram(nil, ts, h, false) + chk, _, _, err = app.AppendHistogram(nil, 0, ts, h, false) require.NoError(t, err) require.Nil(t, chk) hExp = h.Copy() @@ -235,7 +235,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) { NegativeBuckets: []int64{1}, } - chk, _, app, err := app.AppendHistogram(nil, ts1, h1, false) + chk, _, app, err := app.AppendHistogram(nil, 0, ts1, h1, false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -271,7 +271,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) { require.True(t, ok) // Only new buckets came in. 
require.Equal(t, NotCounterReset, cr) c, app = hApp.recode(posInterjections, negInterjections, h2.PositiveSpans, h2.NegativeSpans) - chk, _, _, err = app.AppendHistogram(nil, ts2, h2, false) + chk, _, _, err = app.AppendHistogram(nil, 0, ts2, h2, false) require.NoError(t, err) require.Nil(t, chk) @@ -344,7 +344,7 @@ func TestHistogramChunkAppendable(t *testing.T) { ts := int64(1234567890) - chk, _, app, err := app.AppendHistogram(nil, ts, h.Copy(), false) + chk, _, app, err := app.AppendHistogram(nil, 0, ts, h.Copy(), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -581,7 +581,7 @@ func TestHistogramChunkAppendable(t *testing.T) { nextChunk := NewHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -599,7 +599,7 @@ func TestHistogramChunkAppendable(t *testing.T) { nextChunk := NewHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -629,7 +629,7 @@ func TestHistogramChunkAppendable(t *testing.T) { nextChunk := NewHistogramChunk() app, err := nextChunk.Appender() require.NoError(t, err) - newChunk, recoded, newApp, err := app.AppendHistogram(hApp, ts+1, h2, false) + newChunk, recoded, newApp, err := app.AppendHistogram(hApp, 0, ts+1, h2, false) require.NoError(t, err) require.Nil(t, newChunk) require.False(t, recoded) @@ -776,7 +776,7 @@ func TestHistogramChunkAppendable(t *testing.T) { func assertNewHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, 
expectHeader CounterResetHeader, expectHint histogram.CounterResetHint) { oldChunkBytes := oldChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false) require.Equal(t, oldChunkBytes, oldChunk.Bytes()) // Sanity check that previous chunk is untouched. require.NoError(t, err) require.NotNil(t, newChunk) @@ -791,7 +791,7 @@ func assertNewHistogramChunkOnAppend(t *testing.T, oldChunk Chunk, hApp *Histogr func assertNoNewHistogramChunkOnAppend(t *testing.T, currChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, expectHeader CounterResetHeader) { prevChunkBytes := currChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false) require.Greater(t, len(currChunk.Bytes()), len(prevChunkBytes)) // Check that current chunk is bigger than previously. require.NoError(t, err) require.Nil(t, newChunk) @@ -804,7 +804,7 @@ func assertNoNewHistogramChunkOnAppend(t *testing.T, currChunk Chunk, hApp *Hist func assertRecodedHistogramChunkOnAppend(t *testing.T, prevChunk Chunk, hApp *HistogramAppender, ts int64, h *histogram.Histogram, expectHeader CounterResetHeader) { prevChunkBytes := prevChunk.Bytes() - newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, ts, h, false) + newChunk, recoded, newAppender, err := hApp.AppendHistogram(nil, 0, ts, h, false) require.Equal(t, prevChunkBytes, prevChunk.Bytes()) // Sanity check that previous chunk is untouched. This may change in the future if we implement in-place recoding. 
require.NoError(t, err) require.NotNil(t, newChunk) @@ -1029,7 +1029,7 @@ func TestHistogramChunkAppendableWithEmptySpan(t *testing.T) { require.NoError(t, err) require.Equal(t, 0, c.NumSamples()) - _, _, _, err = app.AppendHistogram(nil, 1, tc.h1, true) + _, _, _, err = app.AppendHistogram(nil, 1, 0, tc.h1, true) require.NoError(t, err) require.Equal(t, 1, c.NumSamples()) hApp, _ := app.(*HistogramAppender) @@ -1172,7 +1172,7 @@ func TestAtFloatHistogram(t *testing.T) { app, err := chk.Appender() require.NoError(t, err) for i := range input { - newc, _, _, err := app.AppendHistogram(nil, int64(i), &input[i], false) + newc, _, _, err := app.AppendHistogram(nil, 0, int64(i), &input[i], false) require.NoError(t, err) require.Nil(t, newc) } @@ -1230,7 +1230,7 @@ func TestHistogramChunkAppendableGauge(t *testing.T) { ts := int64(1234567890) - chk, _, app, err := app.AppendHistogram(nil, ts, h.Copy(), false) + chk, _, app, err := app.AppendHistogram(nil, 0, ts, h.Copy(), false) require.NoError(t, err) require.Nil(t, chk) require.Equal(t, 1, c.NumSamples()) @@ -1471,7 +1471,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1479,7 +1479,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. 
h2 := h.Copy() h2.Schema++ - c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "histogram schema change") @@ -1493,7 +1493,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1501,7 +1501,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. h2 := h.Copy() h2.CounterResetHint = histogram.CounterReset - c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "histogram counter reset") @@ -1515,7 +1515,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { h := tsdbutil.GenerateTestCustomBucketsHistogram(0) var isRecoded bool - c, isRecoded, app, err = app.AppendHistogram(nil, 1, h, true) + c, isRecoded, app, err = app.AppendHistogram(nil, 0, 1, h, true) require.Nil(t, c) require.False(t, isRecoded) require.NoError(t, err) @@ -1523,7 +1523,7 @@ func TestHistogramAppendOnlyErrors(t *testing.T) { // Add erroring histogram. 
h2 := h.Copy() h2.CustomValues = []float64{0, 1, 2, 3, 4, 5, 6, 7} - c, isRecoded, _, err = app.AppendHistogram(nil, 2, h2, true) + c, isRecoded, _, err = app.AppendHistogram(nil, 0, 2, h2, true) require.Nil(t, c) require.False(t, isRecoded) require.EqualError(t, err, "histogram counter reset") @@ -1556,10 +1556,10 @@ func TestHistogramUniqueSpansAfterNextWithAtHistogram(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. @@ -1607,10 +1607,10 @@ func TestHistogramUniqueSpansAfterNextWithAtFloatHistogram(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. @@ -1653,10 +1653,10 @@ func TestHistogramCustomValuesInternedAfterNextWithAtHistogram(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. 
@@ -1699,10 +1699,10 @@ func TestHistogramCustomValuesInternedAfterNextWithAtFloatHistogram(t *testing.T app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 0, h1, false) + _, _, _, err = app.AppendHistogram(nil, 0, 0, h1, false) require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h2, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h2, false) require.NoError(t, err) // Create an iterator and advance to the first histogram. @@ -1754,7 +1754,7 @@ func BenchmarkAppendable(b *testing.B) { b.Fatal(err) } - _, _, _, err = app.AppendHistogram(nil, 1, h, true) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h, true) if err != nil { b.Fatal(err) } @@ -1791,7 +1791,7 @@ func TestIntHistogramEmptyBucketsWithGaps(t *testing.T) { c := NewHistogramChunk() app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h1, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h1, false) require.NoError(t, err) h2 := &histogram.Histogram{ @@ -1804,7 +1804,7 @@ func TestIntHistogramEmptyBucketsWithGaps(t *testing.T) { } require.NoError(t, h2.Validate()) - newC, recoded, _, err := app.AppendHistogram(nil, 2, h2, false) + newC, recoded, _, err := app.AppendHistogram(nil, 0, 2, h2, false) require.NoError(t, err) require.True(t, recoded) require.NotNil(t, newC) @@ -1839,7 +1839,7 @@ func TestHistogramIteratorFailIfSchemaInValid(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h, false) require.NoError(t, err) it := c.Iterator(nil) @@ -1868,7 +1868,7 @@ func TestHistogramIteratorReduceSchema(t *testing.T) { app, err := c.Appender() require.NoError(t, err) - _, _, _, err = app.AppendHistogram(nil, 1, h, false) + _, _, _, err = app.AppendHistogram(nil, 0, 1, h, false) require.NoError(t, err) it := c.Iterator(nil) diff --git a/tsdb/chunkenc/xor.go b/tsdb/chunkenc/xor.go index 
bbe12a893b..5a9a59dc22 100644 --- a/tsdb/chunkenc/xor.go +++ b/tsdb/chunkenc/xor.go @@ -158,7 +158,7 @@ type xorAppender struct { trailing uint8 } -func (a *xorAppender) Append(t int64, v float64) { +func (a *xorAppender) Append(_, t int64, v float64) { var tDelta uint64 num := binary.BigEndian.Uint16(a.b.bytes()) switch num { @@ -225,11 +225,11 @@ func (a *xorAppender) writeVDelta(v float64) { xorWrite(a.b, v, a.v, &a.leading, &a.trailing) } -func (*xorAppender) AppendHistogram(*HistogramAppender, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) { +func (*xorAppender) AppendHistogram(*HistogramAppender, int64, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) { panic("appended a histogram sample to a float chunk") } -func (*xorAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) { +func (*xorAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) { panic("appended a float histogram sample to a float chunk") } @@ -277,6 +277,10 @@ func (it *xorIterator) AtT() int64 { return it.t } +func (*xorIterator) AtST() int64 { + return 0 +} + func (it *xorIterator) Err() error { return it.err } diff --git a/tsdb/chunkenc/xor_test.go b/tsdb/chunkenc/xor_test.go index 904e536b49..b30c65283d 100644 --- a/tsdb/chunkenc/xor_test.go +++ b/tsdb/chunkenc/xor_test.go @@ -24,7 +24,7 @@ func BenchmarkXorRead(b *testing.B) { app, err := c.Appender() require.NoError(b, err) for i := int64(0); i < 120*1000; i += 1000 { - app.Append(i, float64(i)+float64(i)/10+float64(i)/100+float64(i)/1000) + app.Append(0, i, float64(i)+float64(i)/10+float64(i)/100+float64(i)/1000) } b.ReportAllocs() diff --git a/tsdb/chunks/chunks.go b/tsdb/chunks/chunks.go index f8fc9a2e95..ce4c9d3d78 100644 --- a/tsdb/chunks/chunks.go +++ b/tsdb/chunks/chunks.go @@ -135,6 +135,7 @@ type Meta struct { } // ChunkFromSamples requires all 
samples to have the same type. +// TODO(krajorama): test with ST when chunk formats support it. func ChunkFromSamples(s []Sample) (Meta, error) { return ChunkFromSamplesGeneric(SampleSlice(s)) } @@ -164,9 +165,9 @@ func ChunkFromSamplesGeneric(s Samples) (Meta, error) { for i := 0; i < s.Len(); i++ { switch sampleType { case chunkenc.ValFloat: - ca.Append(s.Get(i).T(), s.Get(i).F()) + ca.Append(s.Get(i).ST(), s.Get(i).T(), s.Get(i).F()) case chunkenc.ValHistogram: - newChunk, _, ca, err = ca.AppendHistogram(nil, s.Get(i).T(), s.Get(i).H(), false) + newChunk, _, ca, err = ca.AppendHistogram(nil, s.Get(i).ST(), s.Get(i).T(), s.Get(i).H(), false) if err != nil { return emptyChunk, err } @@ -174,7 +175,7 @@ func ChunkFromSamplesGeneric(s Samples) (Meta, error) { return emptyChunk, errors.New("did not expect to start a second chunk") } case chunkenc.ValFloatHistogram: - newChunk, _, ca, err = ca.AppendFloatHistogram(nil, s.Get(i).T(), s.Get(i).FH(), false) + newChunk, _, ca, err = ca.AppendFloatHistogram(nil, s.Get(i).ST(), s.Get(i).T(), s.Get(i).FH(), false) if err != nil { return emptyChunk, err } diff --git a/tsdb/chunks/head_chunks_test.go b/tsdb/chunks/head_chunks_test.go index 17efd44aa6..c3cbc5a618 100644 --- a/tsdb/chunks/head_chunks_test.go +++ b/tsdb/chunks/head_chunks_test.go @@ -559,7 +559,7 @@ func randomChunk(t *testing.T) chunkenc.Chunk { app, err := chunk.Appender() require.NoError(t, err) for range length { - app.Append(rand.Int63(), rand.Float64()) + app.Append(0, rand.Int63(), rand.Float64()) } return chunk } diff --git a/tsdb/chunks/samples.go b/tsdb/chunks/samples.go index 8097bcd72b..280f2dd606 100644 --- a/tsdb/chunks/samples.go +++ b/tsdb/chunks/samples.go @@ -25,6 +25,7 @@ type Samples interface { type Sample interface { T() int64 + ST() int64 F() float64 H() *histogram.Histogram FH() *histogram.FloatHistogram @@ -38,16 +39,20 @@ func (s SampleSlice) Get(i int) Sample { return s[i] } func (s SampleSlice) Len() int { return len(s) } type sample 
struct { - t int64 - f float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + st, t int64 + f float64 + h *histogram.Histogram + fh *histogram.FloatHistogram } func (s sample) T() int64 { return s.t } +func (s sample) ST() int64 { + return s.st +} + func (s sample) F() float64 { return s.f } diff --git a/tsdb/head.go b/tsdb/head.go index 955c0ae5a7..213846aa35 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -2111,7 +2111,10 @@ func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHi return sample{t, v, h, fh} } -func (s sample) T() int64 { return s.t } +func (s sample) T() int64 { return s.t } + +// TODO(krajorama): implement ST. +func (sample) ST() int64 { return 0 } func (s sample) F() float64 { return s.f } func (s sample) H() *histogram.Histogram { return s.h } func (s sample) FH() *histogram.FloatHistogram { return s.fh } diff --git a/tsdb/head_append.go b/tsdb/head_append.go index fceb80bd34..6a04fd16d9 100644 --- a/tsdb/head_append.go +++ b/tsdb/head_append.go @@ -1843,7 +1843,8 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, o chunkOpts) (sa if !sampleInOrder { return sampleInOrder, chunkCreated } - s.app.Append(t, v) + // TODO(krajorama): pass ST. + s.app.Append(0, t, v) c.maxTime = t @@ -1885,7 +1886,8 @@ func (s *memSeries) appendHistogram(t int64, h *histogram.Histogram, appendID ui prevApp = nil } - newChunk, recoded, s.app, _ = s.app.AppendHistogram(prevApp, t, h, false) // false=request a new chunk if needed + // TODO(krajorama): pass ST. + newChunk, recoded, s.app, _ = s.app.AppendHistogram(prevApp, 0, t, h, false) // false=request a new chunk if needed s.lastHistogramValue = h s.lastFloatHistogramValue = nil @@ -1942,7 +1944,8 @@ func (s *memSeries) appendFloatHistogram(t int64, fh *histogram.FloatHistogram, prevApp = nil } - newChunk, recoded, s.app, _ = s.app.AppendFloatHistogram(prevApp, t, fh, false) // False means request a new chunk if needed. + // TODO(krajorama): pass ST. 
+ newChunk, recoded, s.app, _ = s.app.AppendFloatHistogram(prevApp, 0, t, fh, false) // False means request a new chunk if needed. s.lastHistogramValue = nil s.lastFloatHistogramValue = fh diff --git a/tsdb/ooo_head.go b/tsdb/ooo_head.go index c6ae924372..bbb0f10e77 100644 --- a/tsdb/ooo_head.go +++ b/tsdb/ooo_head.go @@ -125,7 +125,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error } switch encoding { case chunkenc.EncXOR: - app.Append(s.t, s.f) + // TODO(krajorama): pass ST. + app.Append(0, s.t, s.f) case chunkenc.EncHistogram: // Ignoring ok is ok, since we don't want to compare to the wrong previous appender anyway. prevHApp, _ := prevApp.(*chunkenc.HistogramAppender) @@ -133,7 +134,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error newChunk chunkenc.Chunk recoded bool ) - newChunk, recoded, app, _ = app.AppendHistogram(prevHApp, s.t, s.h, false) + // TODO(krajorama): pass ST. + newChunk, recoded, app, _ = app.AppendHistogram(prevHApp, 0, s.t, s.h, false) if newChunk != nil { // A new chunk was allocated. if !recoded { chks = append(chks, memChunk{chunk, cmint, cmaxt, nil}) @@ -148,7 +150,8 @@ func (o *OOOChunk) ToEncodedChunks(mint, maxt int64) (chks []memChunk, err error newChunk chunkenc.Chunk recoded bool ) - newChunk, recoded, app, _ = app.AppendFloatHistogram(prevHApp, s.t, s.fh, false) + // TODO(krajorama): pass ST. + newChunk, recoded, app, _ = app.AppendFloatHistogram(prevHApp, 0, s.t, s.fh, false) if newChunk != nil { // A new chunk was allocated. if !recoded { chks = append(chks, memChunk{chunk, cmint, cmaxt, nil}) diff --git a/tsdb/querier.go b/tsdb/querier.go index 4a487aa568..ce0292bf24 100644 --- a/tsdb/querier.go +++ b/tsdb/querier.go @@ -788,6 +788,11 @@ func (p *populateWithDelSeriesIterator) AtT() int64 { return p.curr.AtT() } +// AtST TODO(krajorama): test AtST() when chunks support it. 
+func (p *populateWithDelSeriesIterator) AtST() int64 { + return p.curr.AtST() +} + func (p *populateWithDelSeriesIterator) Err() error { if err := p.populateWithDelGenericSeriesIterator.Err(); err != nil { return err @@ -862,6 +867,7 @@ func (p *populateWithDelChunkSeriesIterator) Next() bool { // populateCurrForSingleChunk sets the fields within p.currMetaWithChunk. This // should be called if the samples in p.currDelIter only form one chunk. +// TODO(krajorama): test ST when chunks support it. func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { valueType := p.currDelIter.Next() if valueType == chunkenc.ValNone { @@ -877,7 +883,7 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { var ( newChunk chunkenc.Chunk app chunkenc.Appender - t int64 + st, t int64 err error ) switch valueType { @@ -893,7 +899,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { } var h *histogram.Histogram t, h = p.currDelIter.AtHistogram(nil) - _, _, app, err = app.AppendHistogram(nil, t, h, true) + st = p.currDelIter.AtST() + _, _, app, err = app.AppendHistogram(nil, st, t, h, true) if err != nil { break } @@ -910,7 +917,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { } var v float64 t, v = p.currDelIter.At() - app.Append(t, v) + st = p.currDelIter.AtST() + app.Append(st, t, v) } case chunkenc.ValFloatHistogram: newChunk = chunkenc.NewFloatHistogramChunk() @@ -924,7 +932,8 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { } var h *histogram.FloatHistogram t, h = p.currDelIter.AtFloatHistogram(nil) - _, _, app, err = app.AppendFloatHistogram(nil, t, h, true) + st = p.currDelIter.AtST() + _, _, app, err = app.AppendFloatHistogram(nil, st, t, h, true) if err != nil { break } @@ -950,6 +959,7 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { // populateChunksFromIterable reads the samples from 
currDelIter to create // chunks for chunksFromIterable. It also sets p.currMetaWithChunk to the first // chunk. +// TODO(krajorama): test ST when chunks support it. func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool { p.chunksFromIterable = p.chunksFromIterable[:0] p.chunksFromIterableIdx = -1 @@ -965,7 +975,7 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool { var ( // t is the timestamp for the current sample. - t int64 + st, t int64 cmint int64 cmaxt int64 @@ -1004,23 +1014,26 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool { { var v float64 t, v = p.currDelIter.At() - app.Append(t, v) + st = p.currDelIter.AtST() + app.Append(st, t, v) } case chunkenc.ValHistogram: { var v *histogram.Histogram t, v = p.currDelIter.AtHistogram(nil) + st = p.currDelIter.AtST() // No need to set prevApp as AppendHistogram will set the // counter reset header for the appender that's returned. - newChunk, recoded, app, err = app.AppendHistogram(nil, t, v, false) + newChunk, recoded, app, err = app.AppendHistogram(nil, st, t, v, false) } case chunkenc.ValFloatHistogram: { var v *histogram.FloatHistogram t, v = p.currDelIter.AtFloatHistogram(nil) + st = p.currDelIter.AtST() // No need to set prevApp as AppendHistogram will set the // counter reset header for the appender that's returned. - newChunk, recoded, app, err = app.AppendFloatHistogram(nil, t, v, false) + newChunk, recoded, app, err = app.AppendFloatHistogram(nil, st, t, v, false) } } @@ -1202,6 +1215,11 @@ func (it *DeletedIterator) AtT() int64 { return it.Iter.AtT() } +// AtST TODO(krajorama): test AtST() when chunks support it. 
+func (it *DeletedIterator) AtST() int64 { + return it.Iter.AtST() +} + func (it *DeletedIterator) Seek(t int64) chunkenc.ValueType { if it.Iter.Err() != nil { return chunkenc.ValNone diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 6933aa617a..57a53c46fe 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -141,7 +141,7 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe app, _ := chunk.Appender() for _, smpl := range chk { require.NotNil(t, smpl.fh, "chunk can only contain one type of sample") - _, _, _, err := app.AppendFloatHistogram(nil, smpl.t, smpl.fh, true) + _, _, _, err := app.AppendFloatHistogram(nil, 0, smpl.t, smpl.fh, true) require.NoError(t, err, "chunk should be appendable") } chkReader[chunkRef] = chunk @@ -150,7 +150,7 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe app, _ := chunk.Appender() for _, smpl := range chk { require.NotNil(t, smpl.h, "chunk can only contain one type of sample") - _, _, _, err := app.AppendHistogram(nil, smpl.t, smpl.h, true) + _, _, _, err := app.AppendHistogram(nil, 0, smpl.t, smpl.h, true) require.NoError(t, err, "chunk should be appendable") } chkReader[chunkRef] = chunk @@ -160,7 +160,7 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe for _, smpl := range chk { require.Nil(t, smpl.h, "chunk can only contain one type of sample") require.Nil(t, smpl.fh, "chunk can only contain one type of sample") - app.Append(smpl.t, smpl.f) + app.Append(0, smpl.t, smpl.f) } chkReader[chunkRef] = chunk } @@ -790,6 +790,10 @@ func (it *mockSampleIterator) AtT() int64 { return it.s[it.idx].T() } +func (it *mockSampleIterator) AtST() int64 { + return it.s[it.idx].ST() +} + func (it *mockSampleIterator) Next() chunkenc.ValueType { if it.idx < len(it.s)-1 { it.idx++ @@ -2096,7 +2100,7 @@ func TestDeletedIterator(t *testing.T) { for i := range 1000 { act[i].t = int64(i) act[i].f = rand.Float64() - 
app.Append(act[i].t, act[i].f) + app.Append(0, act[i].t, act[i].f) } cases := []struct { @@ -2156,7 +2160,7 @@ func TestDeletedIterator_WithSeek(t *testing.T) { for i := range 1000 { act[i].t = int64(i) act[i].f = float64(i) - app.Append(act[i].t, act[i].f) + app.Append(0, act[i].t, act[i].f) } cases := []struct { From 1e77d9ded85fd2c3db5a6be153d6d85e3d6853ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 08:57:54 +0100 Subject: [PATCH 012/165] storage/buffer.go: add ST to sample types and iterators MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Also fix the single multi line fSample definition to be one liner. Signed-off-by: György Krajcsovits --- storage/buffer.go | 56 +++++++++++++++++++++++++++--------------- storage/buffer_test.go | 5 +--- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/storage/buffer.go b/storage/buffer.go index c6a32821d8..cdf8879f21 100644 --- a/storage/buffer.go +++ b/storage/buffer.go @@ -119,13 +119,16 @@ func (b *BufferedSeriesIterator) Next() chunkenc.ValueType { return chunkenc.ValNone case chunkenc.ValFloat: t, f := b.it.At() - b.buf.addF(fSample{t: t, f: f}) + st := b.it.AtST() + b.buf.addF(fSample{st: st, t: t, f: f}) case chunkenc.ValHistogram: t, h := b.it.AtHistogram(&b.hReader) - b.buf.addH(hSample{t: t, h: h}) + st := b.it.AtST() + b.buf.addH(hSample{st: st, t: t, h: h}) case chunkenc.ValFloatHistogram: t, fh := b.it.AtFloatHistogram(&b.fhReader) - b.buf.addFH(fhSample{t: t, fh: fh}) + st := b.it.AtST() + b.buf.addFH(fhSample{st: st, t: t, fh: fh}) default: panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) } @@ -157,23 +160,27 @@ func (b *BufferedSeriesIterator) AtT() int64 { return b.it.AtT() } +// AtST returns the current sample's start timestamp of the iterator. +func (b *BufferedSeriesIterator) AtST() int64 { + return b.it.AtST() +} + // Err returns the last encountered error. 
func (b *BufferedSeriesIterator) Err() error { return b.it.Err() } type fSample struct { - t int64 - f float64 + st, t int64 + f float64 } func (s fSample) T() int64 { return s.t } -// TODO(krajorama): implement ST. -func (fSample) ST() int64 { - return 0 +func (s fSample) ST() int64 { + return s.st } func (s fSample) F() float64 { @@ -197,17 +204,16 @@ func (s fSample) Copy() chunks.Sample { } type hSample struct { - t int64 - h *histogram.Histogram + st, t int64 + h *histogram.Histogram } func (s hSample) T() int64 { return s.t } -// TODO(krajorama): implement ST. -func (hSample) ST() int64 { - return 0 +func (s hSample) ST() int64 { + return s.st } func (hSample) F() float64 { @@ -227,21 +233,20 @@ func (hSample) Type() chunkenc.ValueType { } func (s hSample) Copy() chunks.Sample { - return hSample{t: s.t, h: s.h.Copy()} + return hSample{st: s.st, t: s.t, h: s.h.Copy()} } type fhSample struct { - t int64 - fh *histogram.FloatHistogram + st, t int64 + fh *histogram.FloatHistogram } func (s fhSample) T() int64 { return s.t } -// TODO(krajorama): implement ST. 
-func (fhSample) ST() int64 { - return 0 +func (s fhSample) ST() int64 { + return s.st } func (fhSample) F() float64 { @@ -261,7 +266,7 @@ func (fhSample) Type() chunkenc.ValueType { } func (s fhSample) Copy() chunks.Sample { - return fhSample{t: s.t, fh: s.fh.Copy()} + return fhSample{st: s.st, t: s.t, fh: s.fh.Copy()} } type sampleRing struct { @@ -344,6 +349,7 @@ func (r *sampleRing) iterator() *SampleRingIterator { type SampleRingIterator struct { r *sampleRing i int + st int64 t int64 f float64 h *histogram.Histogram @@ -365,21 +371,25 @@ func (it *SampleRingIterator) Next() chunkenc.ValueType { switch it.r.bufInUse { case fBuf: s := it.r.atF(it.i) + it.st = s.st it.t = s.t it.f = s.f return chunkenc.ValFloat case hBuf: s := it.r.atH(it.i) + it.st = s.st it.t = s.t it.h = s.h return chunkenc.ValHistogram case fhBuf: s := it.r.atFH(it.i) + it.st = s.st it.t = s.t it.fh = s.fh return chunkenc.ValFloatHistogram } s := it.r.at(it.i) + it.st = s.ST() it.t = s.T() switch s.Type() { case chunkenc.ValHistogram: @@ -425,6 +435,10 @@ func (it *SampleRingIterator) AtT() int64 { return it.t } +func (it *SampleRingIterator) AtST() int64 { + return it.st +} + func (r *sampleRing) at(i int) chunks.Sample { j := (r.f + i) % len(r.iBuf) return r.iBuf[j] @@ -666,6 +680,7 @@ func addH(s hSample, buf []hSample, r *sampleRing) []hSample { } buf[r.i].t = s.t + buf[r.i].st = s.st if buf[r.i].h == nil { buf[r.i].h = s.h.Copy() } else { @@ -710,6 +725,7 @@ func addFH(s fhSample, buf []fhSample, r *sampleRing) []fhSample { } buf[r.i].t = s.t + buf[r.i].st = s.st if buf[r.i].fh == nil { buf[r.i].fh = s.fh.Copy() } else { diff --git a/storage/buffer_test.go b/storage/buffer_test.go index beb9d8e71c..e700231756 100644 --- a/storage/buffer_test.go +++ b/storage/buffer_test.go @@ -61,10 +61,7 @@ func TestSampleRing(t *testing.T) { input := []fSample{} for _, t := range c.input { - input = append(input, fSample{ - t: t, - f: float64(rand.Intn(100)), - }) + input = append(input, fSample{t: t, 
f: float64(rand.Intn(100))}) } for i, s := range input { From a00c0d6a660eb38380c96d801422c694f0506e23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 09:37:15 +0100 Subject: [PATCH 013/165] auto update f/h/fh sample init with positional fields MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit find . -name "*.go" -type f \ -exec sed -E -i 's/((f|h|fh)Sample\{)([^,{:]+,[^,]+\})/\10, \3/g' {} + Signed-off-by: György Krajcsovits --- storage/merge_test.go | 308 ++++++++++++++++++++--------------------- storage/series.go | 6 +- storage/series_test.go | 10 +- 3 files changed, 162 insertions(+), 162 deletions(-) diff --git a/storage/merge_test.go b/storage/merge_test.go index 0060950d6f..5ffb0c4851 100644 --- a/storage/merge_test.go +++ b/storage/merge_test.go @@ -66,116 +66,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) { { name: "one querier, two series", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), ), }, { name: "two queriers, one 
different series each", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), }, { - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), ), }, { name: "two time unsorted queriers, two series each", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}), 
}}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}}, ), ), }, { name: "five queriers, only two queriers have two time unsorted series each", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}), }, {}}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}}, + 
[]chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}}, ), ), }, { name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}, fSample{0, 6, 6}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}), }, {}}, extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}, fSample{0, 6, 6}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}}, ), ), }, { name: "two queriers, with two series, one is overlapping", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 
6}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 21}, fSample{0, 3, 31}, fSample{0, 5, 5}, fSample{0, 6, 6}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 22}, fSample{3, 32}}), - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}), + NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 22}, fSample{0, 3, 32}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}, fSample{0, 4, 4}}), }, {}}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 21}, fSample{0, 3, 31}, fSample{0, 5, 5}, fSample{0, 6, 6}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}}, ), ), }, { name: "two queries, one with NaN samples series", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}}), }, { - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}), + NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 1, 1}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}, fSample{1, 1}}), + 
NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}, fSample{0, 1, 1}}), ), }, } { @@ -249,108 +249,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) { { name: "one querier, two series", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), ), }, { name: "two secondaries, one different series each", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), 
[]chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), ), }, { name: "two secondaries, two not in time order series each", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + 
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}), }}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{5, 5}}, - []chunks.Sample{fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 6, 6}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, - []chunks.Sample{fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, + []chunks.Sample{fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 4, 4}}, ), ), }, { name: "five secondaries, only two have two not in time order series each", chkQuerierSeries: [][]ChunkSeries{{}, {}, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), 
[]chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}), }, {}}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{5, 5}}, - []chunks.Sample{fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 6, 6}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, - []chunks.Sample{fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, + []chunks.Sample{fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 4, 4}}, ), ), }, { name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 6, 6}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, []chunks.Sample{fSample{0, 2, 2}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 
3}}, []chunks.Sample{fSample{4, 4}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 3, 3}}, []chunks.Sample{fSample{0, 4, 4}}), }}, extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{5, 5}}, - []chunks.Sample{fSample{6, 6}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 6, 6}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, - []chunks.Sample{fSample{2, 2}}, - []chunks.Sample{fSample{3, 3}}, - []chunks.Sample{fSample{4, 4}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}}, + []chunks.Sample{fSample{0, 2, 2}}, + []chunks.Sample{fSample{0, 3, 3}}, + []chunks.Sample{fSample{0, 4, 4}}, ), ), }, { name: "two queries, one with NaN samples series", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0, math.NaN()}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 1, 1}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}, []chunks.Sample{fSample{1, 1}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), 
[]chunks.Sample{fSample{0, 0, math.NaN()}}, []chunks.Sample{fSample{0, 1, 1}}), ), }, } { @@ -431,9 +431,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { { name: "single series", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), }, { name: "two empty series", @@ -446,55 +446,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { { name: "two non overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, 
fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, { name: "two overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{7, 7}, fSample{8, 8}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 7, 7}, fSample{0, 8, 8}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, { name: "two duplicated", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), }, - expected: 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), }, { name: "three overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 6}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}, fSample{0, 5, 5}, fSample{0, 6, 6}}), }, { name: "three in chained overlap", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 
6}, fSample{10, 10}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 66}, fSample{10, 10}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 4, 4}, fSample{0, 5, 5}, fSample{0, 6, 66}, fSample{0, 10, 10}}), }, { name: "three in chained overlap complex", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}), }, expected: 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{0, 0}, fSample{2, 2}, fSample{5, 5}, fSample{10, 10}, fSample{15, 15}, fSample{18, 18}, fSample{20, 20}, fSample{25, 25}, fSample{26, 26}, fSample{30, 30}}, - []chunks.Sample{fSample{31, 31}, fSample{35, 35}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 2, 2}, fSample{0, 5, 5}, fSample{0, 10, 10}, fSample{0, 15, 15}, fSample{0, 18, 18}, fSample{0, 20, 20}, fSample{0, 25, 25}, fSample{0, 26, 26}, fSample{0, 30, 30}}, + []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}, ), }, { @@ -534,13 +534,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { name: "histogram chunks overlapping with float chunks", input: []ChunkSeries{ NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(0), histogramSample(5)}, []chunks.Sample{histogramSample(10), histogramSample(15)}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 12, 12}}, []chunks.Sample{fSample{0, 14, 14}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(0)}, - []chunks.Sample{fSample{1, 1}}, + []chunks.Sample{fSample{0, 1, 1}}, []chunks.Sample{histogramSample(5), histogramSample(10)}, - []chunks.Sample{fSample{12, 12}, fSample{14, 14}}, + []chunks.Sample{fSample{0, 12, 12}, fSample{0, 14, 14}}, []chunks.Sample{histogramSample(15)}, ), }, @@ -560,13 +560,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { name: "float histogram chunks overlapping with float chunks", input: []ChunkSeries{ NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []chunks.Sample{floatHistogramSample(10), floatHistogramSample(15)}), - 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 12, 12}}, []chunks.Sample{fSample{0, 14, 14}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0)}, - []chunks.Sample{fSample{1, 1}}, + []chunks.Sample{fSample{0, 1, 1}}, []chunks.Sample{floatHistogramSample(5), floatHistogramSample(10)}, - []chunks.Sample{fSample{12, 12}, fSample{14, 14}}, + []chunks.Sample{fSample{0, 12, 12}, fSample{0, 14, 14}}, []chunks.Sample{floatHistogramSample(15)}, ), }, @@ -736,9 +736,9 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) { { name: "single series", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}}), }, { name: "two empty series", @@ -751,70 +751,70 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) { { name: "two non overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, { name: "two overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 10}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}, - []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}}, []chunks.Sample{fSample{0, 3, 3}, fSample{0, 8, 8}}, + []chunks.Sample{fSample{0, 7, 7}, fSample{0, 9, 9}}, []chunks.Sample{fSample{0, 10, 
10}}, ), }, { name: "two duplicated", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}, - []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}, ), }, { name: "three overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, 
fSample{3, 3}, fSample{5, 5}}, - []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}, - []chunks.Sample{fSample{0, 0}, fSample{4, 4}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 6, 6}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 4, 4}}, ), }, { name: "three in chained overlap", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 6}, fSample{10, 10}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}, - []chunks.Sample{fSample{4, 4}, fSample{6, 66}}, - []chunks.Sample{fSample{6, 6}, fSample{10, 10}}, + []chunks.Sample{fSample{0, 1, 1}, fSample{0, 2, 2}, fSample{0, 3, 3}, fSample{0, 5, 5}}, + []chunks.Sample{fSample{0, 4, 4}, fSample{0, 6, 66}}, + []chunks.Sample{fSample{0, 6, 6}, fSample{0, 10, 10}}, ), }, { name: "three in chained overlap complex", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, 
fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}, - []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}, - []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}, + []chunks.Sample{fSample{0, 0, 0}, fSample{0, 5, 5}}, []chunks.Sample{fSample{0, 10, 10}, fSample{0, 15, 15}}, + []chunks.Sample{fSample{0, 2, 2}, fSample{0, 20, 20}}, []chunks.Sample{fSample{0, 25, 25}, fSample{0, 30, 30}}, + []chunks.Sample{fSample{0, 18, 18}, fSample{0, 26, 26}}, []chunks.Sample{fSample{0, 31, 31}, fSample{0, 35, 35}}, ), }, { @@ -1059,7 +1059,7 @@ func (*mockChunkSeriesSet) Warnings() annotations.Annotations { return nil } func TestChainSampleIterator(t *testing.T) { for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{ - "float": func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} }, + "float": func(ts int64) chunks.Sample { return fSample{0, ts, float64(ts)} }, "histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) }, "float histogram": func(ts int64) 
chunks.Sample { return floatHistogramSample(ts, uk) }, } { @@ -1176,7 +1176,7 @@ func TestChainSampleIteratorHistogramCounterResetHint(t *testing.T) { func TestChainSampleIteratorSeek(t *testing.T) { for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{ - "float": func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} }, + "float": func(ts int64) chunks.Sample { return fSample{0, ts, float64(ts)} }, "histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) }, "float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) }, } { @@ -1224,13 +1224,13 @@ func TestChainSampleIteratorSeek(t *testing.T) { switch merged.Seek(tc.seek) { case chunkenc.ValFloat: t, f := merged.At() - actual = append(actual, fSample{t, f}) + actual = append(actual, fSample{0, t, f}) case chunkenc.ValHistogram: t, h := merged.AtHistogram(nil) - actual = append(actual, hSample{t, h}) + actual = append(actual, hSample{0, t, h}) case chunkenc.ValFloatHistogram: t, fh := merged.AtFloatHistogram(nil) - actual = append(actual, fhSample{t, fh}) + actual = append(actual, fhSample{0, t, fh}) } s, err := ExpandSamples(merged, nil) require.NoError(t, err) @@ -1243,7 +1243,7 @@ func TestChainSampleIteratorSeek(t *testing.T) { func TestChainSampleIteratorSeekFailingIterator(t *testing.T) { merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{ - NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}), + NewListSeriesIterator(samples{fSample{0, 0, 0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}), errIterator{errors.New("something went wrong")}, }) @@ -1253,7 +1253,7 @@ func TestChainSampleIteratorSeekFailingIterator(t *testing.T) { func TestChainSampleIteratorNextImmediatelyFailingIterator(t *testing.T) { merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{ - NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}), + NewListSeriesIterator(samples{fSample{0, 0, 
0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}), errIterator{errors.New("something went wrong")}, }) @@ -1263,7 +1263,7 @@ func TestChainSampleIteratorNextImmediatelyFailingIterator(t *testing.T) { // Next() does some special handling for the first iterator, so make sure it handles the first iterator returning an error too. merged = ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{ errIterator{errors.New("something went wrong")}, - NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}), + NewListSeriesIterator(samples{fSample{0, 0, 0.1}, fSample{0, 1, 1.1}, fSample{0, 2, 2.1}}), }) require.Equal(t, chunkenc.ValNone, merged.Next()) @@ -1310,13 +1310,13 @@ func TestChainSampleIteratorSeekHistogramCounterResetHint(t *testing.T) { switch merged.Seek(tc.seek) { case chunkenc.ValFloat: t, f := merged.At() - actual = append(actual, fSample{t, f}) + actual = append(actual, fSample{0, t, f}) case chunkenc.ValHistogram: t, h := merged.AtHistogram(nil) - actual = append(actual, hSample{t, h}) + actual = append(actual, hSample{0, t, h}) case chunkenc.ValFloatHistogram: t, fh := merged.AtFloatHistogram(nil) - actual = append(actual, fhSample{t, fh}) + actual = append(actual, fhSample{0, t, fh}) } s, err := ExpandSamples(merged, nil) require.NoError(t, err) diff --git a/storage/series.go b/storage/series.go index d114438078..c16e628ba2 100644 --- a/storage/series.go +++ b/storage/series.go @@ -452,11 +452,11 @@ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { switch { case h != nil: - return hSample{t, h} + return hSample{0, t, h} case fh != nil: - return fhSample{t, fh} + return fhSample{0, t, fh} default: - return fSample{t, f} + return fSample{0, t, f} } } } diff --git a/storage/series_test.go b/storage/series_test.go index 954d62f1b3..3ad84be6b0 100644 --- a/storage/series_test.go +++ 
b/storage/series_test.go @@ -28,11 +28,11 @@ import ( func TestListSeriesIterator(t *testing.T) { it := NewListSeriesIterator(samples{ - fSample{0, 0}, - fSample{1, 1}, - fSample{1, 1.5}, - fSample{2, 2}, - fSample{3, 3}, + fSample{0, 0, 0}, + fSample{0, 1, 1}, + fSample{0, 1, 1.5}, + fSample{0, 2, 2}, + fSample{0, 3, 3}, }) // Seek to the first sample with ts=1. From f616689f0980eb94e885ffd02e42fb59314b15db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 09:46:56 +0100 Subject: [PATCH 014/165] tsdb/head.go: add start timestamp to sample type used in tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- tsdb/head.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tsdb/head.go b/tsdb/head.go index 213846aa35..8db9231124 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -2101,20 +2101,20 @@ func (s *stripeSeries) postCreation(lset labels.Labels) { } type sample struct { + st int64 t int64 f float64 h *histogram.Histogram fh *histogram.FloatHistogram } -func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { - return sample{t, v, h, fh} +func newSample(st, t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { + return sample{st, t, v, h, fh} } func (s sample) T() int64 { return s.t } -// TODO(krajorama): implement ST. 
-func (sample) ST() int64 { return 0 } +func (s sample) ST() int64 { return s.st } func (s sample) F() float64 { return s.f } func (s sample) H() *histogram.Histogram { return s.h } func (s sample) FH() *histogram.FloatHistogram { return s.fh } From 28dca34f4ff257ec74121736ab29a600424d0dce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 09:55:45 +0100 Subject: [PATCH 015/165] auto update head sample use in tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit find . -name "*.go" -type f -exec sed -E -i \ 's/([^[:alpha:]]sample\{)([^,{:]+,[^,]+,[^,]+,[^,]+\})/\10, \2/g' {} + I've omitted tsdb/ooo_head.go from the commit because I'm also adding todo there. Signed-off-by: György Krajcsovits --- tsdb/block_test.go | 16 +- tsdb/db_append_v2_test.go | 16 +- tsdb/db_test.go | 36 +- tsdb/head_append_v2_test.go | 22 +- tsdb/head_test.go | 22 +- tsdb/querier_test.go | 722 ++++++++++++++++++------------------ 6 files changed, 417 insertions(+), 417 deletions(-) diff --git a/tsdb/block_test.go b/tsdb/block_test.go index 855fa5638a..edd2df7415 100644 --- a/tsdb/block_test.go +++ b/tsdb/block_test.go @@ -176,7 +176,7 @@ func TestCorruptedChunk(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tmpdir := t.TempDir() - series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{1, 1, nil, nil}}) + series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{0, 1, 1, nil, nil}}) blockDir := createBlock(t, tmpdir, []storage.Series{series}) files, err := sequenceFiles(chunkDir(blockDir)) require.NoError(t, err) @@ -236,7 +236,7 @@ func TestLabelValuesWithMatchers(t *testing.T) { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "tens", fmt.Sprintf("value%d", i/10), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) } blockDir := 
createBlock(t, tmpdir, seriesEntries) @@ -319,7 +319,7 @@ func TestBlockQuerierReturnsSortedLabelValues(t *testing.T) { for i := 100; i > 0; i-- { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "__name__", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) } blockDir := createBlock(t, tmpdir, seriesEntries) @@ -436,7 +436,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) { "a_unique", fmt.Sprintf("value%d", i), "b_tens", fmt.Sprintf("value%d", i/(metricCount/10)), "c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1" - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) } blockDir := createBlock(b, tmpdir, seriesEntries) @@ -472,13 +472,13 @@ func TestLabelNamesWithMatchers(t *testing.T) { for i := range 100 { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) if i%10 == 0 { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "tens", fmt.Sprintf("value%d", i/10), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) } if i%20 == 0 { @@ -486,7 +486,7 @@ func TestLabelNamesWithMatchers(t *testing.T) { "tens", fmt.Sprintf("value%d", i/10), "twenties", fmt.Sprintf("value%d", i/20), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{0, 100, 0, nil, nil}})) } } @@ -542,7 +542,7 @@ func TestBlockIndexReader_PostingsForLabelMatching(t *testing.T) { testPostingsForLabelMatching(t, 2, func(t *testing.T, series []labels.Labels) IndexReader { var seriesEntries []storage.Series for _, s := range series { - seriesEntries = append(seriesEntries, 
storage.NewListSeries(s, []chunks.Sample{sample{100, 0, nil, nil}})) + seriesEntries = append(seriesEntries, storage.NewListSeries(s, []chunks.Sample{sample{0, 100, 0, nil, nil}})) } blockDir := createBlock(t, t.TempDir(), seriesEntries) diff --git a/tsdb/db_append_v2_test.go b/tsdb/db_append_v2_test.go index 344b1d6943..16134e8c93 100644 --- a/tsdb/db_append_v2_test.go +++ b/tsdb/db_append_v2_test.go @@ -372,7 +372,7 @@ func TestDeleteSimple_AppendV2(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -507,7 +507,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) { ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}}, }, ssMap) // Append Out of Order Value. 
@@ -524,7 +524,7 @@ func TestSkippingInvalidValuesInSameTxn_AppendV2(t *testing.T) { ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}}, }, ssMap) } @@ -669,7 +669,7 @@ func TestDB_SnapshotWithDelete_AppendV2(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -772,7 +772,7 @@ func TestDB_e2e_AppendV2(t *testing.T) { for range numDatapoints { v := rand.Float64() - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{0, ts, v, nil, nil}) _, err := app.Append(0, lset, 0, ts, v, nil, nil, storage.AOptions{}) require.NoError(t, err) @@ -1094,7 +1094,7 @@ func TestTombstoneClean_AppendV2(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -2310,7 +2310,7 @@ func TestCompactHead_AppendV2(t *testing.T) { val := rand.Float64() _, err := app.Append(0, labels.FromStrings("a", "b"), 0, int64(i), val, nil, nil, storage.AOptions{}) require.NoError(t, err) - expSamples = append(expSamples, sample{int64(i), val, nil, nil}) + expSamples = append(expSamples, sample{0, int64(i), val, nil, nil}) } require.NoError(t, app.Commit()) @@ -2337,7 +2337,7 @@ func TestCompactHead_AppendV2(t *testing.T) { series = seriesSet.At().Iterator(series) for series.Next() == chunkenc.ValFloat { time, val := series.At() - 
actSamples = append(actSamples, sample{time, val, nil, nil}) + actSamples = append(actSamples, sample{0, time, val, nil, nil}) } require.NoError(t, series.Err()) } diff --git a/tsdb/db_test.go b/tsdb/db_test.go index 299ade8826..5e57982b5d 100644 --- a/tsdb/db_test.go +++ b/tsdb/db_test.go @@ -546,7 +546,7 @@ func TestDeleteSimple(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -691,7 +691,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}}, }, ssMap) // Append Out of Order Value. 
@@ -708,7 +708,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 0, 1, nil, nil}, sample{0, 10, 3, nil, nil}}, }, ssMap) } @@ -853,7 +853,7 @@ func TestDB_SnapshotWithDelete(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -956,7 +956,7 @@ func TestDB_e2e(t *testing.T) { for range numDatapoints { v := rand.Float64() - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{0, ts, v, nil, nil}) _, err := app.Append(0, lset, ts, v) require.NoError(t, err) @@ -1278,7 +1278,7 @@ func TestTombstoneClean(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{0, ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -2863,11 +2863,11 @@ func assureChunkFromSamples(t *testing.T, samples []chunks.Sample) chunks.Meta { // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // that the resulted segments includes the expected chunks data. 
func TestChunkWriter_ReadAfterWrite(t *testing.T) { - chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}) - chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}) - chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}) - chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}) - chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}) + chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}}) + chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}}) + chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}}) + chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}}) + chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}}) chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size tests := []struct { @@ -3069,11 +3069,11 @@ func TestRangeForTimestamp(t *testing.T) { func TestChunkReader_ConcurrentReads(t *testing.T) { t.Parallel() chks := []chunks.Meta{ - assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 1, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 2, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 3, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 4, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{0, 1, 5, nil, nil}}), } tempDir := t.TempDir() @@ -3133,7 +3133,7 @@ func TestCompactHead(t *testing.T) { val := rand.Float64() _, err := app.Append(0, labels.FromStrings("a", "b"), 
int64(i), val) require.NoError(t, err) - expSamples = append(expSamples, sample{int64(i), val, nil, nil}) + expSamples = append(expSamples, sample{0, int64(i), val, nil, nil}) } require.NoError(t, app.Commit()) @@ -3160,7 +3160,7 @@ func TestCompactHead(t *testing.T) { series = seriesSet.At().Iterator(series) for series.Next() == chunkenc.ValFloat { time, val := series.At() - actSamples = append(actSamples, sample{time, val, nil, nil}) + actSamples = append(actSamples, sample{0, time, val, nil, nil}) } require.NoError(t, series.Err()) } diff --git a/tsdb/head_append_v2_test.go b/tsdb/head_append_v2_test.go index 33bc3aec38..892a5b3bfe 100644 --- a/tsdb/head_append_v2_test.go +++ b/tsdb/head_append_v2_test.go @@ -312,8 +312,8 @@ func TestHeadAppenderV2_WALMultiRef(t *testing.T) { // The samples before the new ref should be discarded since Head truncation // happens only after compacting the Head. require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: { - sample{1700, 3, nil, nil}, - sample{2000, 4, nil, nil}, + sample{0, 1700, 3, nil, nil}, + sample{0, 2000, 4, nil, nil}, }}, series) } @@ -605,7 +605,7 @@ func TestHeadAppenderV2_DeleteUntilCurrMax(t *testing.T) { it = exps.Iterator(nil) resSamples, err := storage.ExpandSamples(it, newSample) require.NoError(t, err) - require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples) + require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples) for res.Next() { } require.NoError(t, res.Err()) @@ -722,7 +722,7 @@ func TestHeadAppenderV2_Delete_e2e(t *testing.T) { v := rand.Float64() _, err := app.Append(0, ls, 0, ts, v, nil, nil, storage.AOptions{}) require.NoError(t, err) - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{0, ts, v, nil, nil}) ts += rand.Int63n(timeInterval) + 1 } seriesMap[labels.New(l...).String()] = series @@ -1520,7 +1520,7 @@ func TestDataMissingOnQueryDuringCompaction_AppenderV2(t *testing.T) { ref, err = app.Append(ref, 
labels.FromStrings("a", "b"), 0, ts, float64(i), nil, nil, storage.AOptions{}) require.NoError(t, err) maxt = ts - expSamples = append(expSamples, sample{ts, float64(i), nil, nil}) + expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil}) } require.NoError(t, app.Commit()) @@ -2166,17 +2166,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) { aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)} } val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil}) _, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil}) _, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{}) require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist}) _, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{}) require.NoError(t, err) @@ -2244,17 +2244,17 @@ func TestChunkSnapshot_AppenderV2(t *testing.T) { aOpts.Exemplars = []exemplar.Exemplar{newExemplar(lbls, ts)} } val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil}) _, err := app.Append(0, lbls, 0, ts, val, nil, nil, aOpts) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil}) _, err = app.Append(0, lblsHist, 0, ts, 0, hist, nil, storage.AOptions{}) 
require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist}) _, err = app.Append(0, lblsFloatHist, 0, ts, 0, nil, floatHist, storage.AOptions{}) require.NoError(t, err) diff --git a/tsdb/head_test.go b/tsdb/head_test.go index acdf0ee000..d0928d64bf 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -841,8 +841,8 @@ func TestHead_WALMultiRef(t *testing.T) { // The samples before the new ref should be discarded since Head truncation // happens only after compacting the Head. require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: { - sample{1700, 3, nil, nil}, - sample{2000, 4, nil, nil}, + sample{0, 1700, 3, nil, nil}, + sample{0, 2000, 4, nil, nil}, }}, series) } @@ -1859,7 +1859,7 @@ func TestDeleteUntilCurMax(t *testing.T) { it = exps.Iterator(nil) resSamples, err := storage.ExpandSamples(it, newSample) require.NoError(t, err) - require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples) + require.Equal(t, []chunks.Sample{sample{0, 11, 1, nil, nil}}, resSamples) for res.Next() { } require.NoError(t, res.Err()) @@ -1976,7 +1976,7 @@ func TestDelete_e2e(t *testing.T) { v := rand.Float64() _, err := app.Append(0, ls, ts, v) require.NoError(t, err) - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{0, ts, v, nil, nil}) ts += rand.Int63n(timeInterval) + 1 } seriesMap[labels.New(l...).String()] = series @@ -3838,7 +3838,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) { ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i)) require.NoError(t, err) maxt = ts - expSamples = append(expSamples, sample{ts, float64(i), nil, nil}) + expSamples = append(expSamples, sample{0, ts, float64(i), nil, nil}) } require.NoError(t, app.Commit()) @@ -4503,17 +4503,17 @@ func TestChunkSnapshot(t 
*testing.T) { // 240 samples should m-map at least 1 chunk. for ts := int64(1); ts <= 240; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil}) _, err = app.AppendHistogram(0, lblsHist, ts, hist, nil) require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist}) _, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist) require.NoError(t, err) @@ -4577,17 +4577,17 @@ func TestChunkSnapshot(t *testing.T) { // 240 samples should m-map at least 1 chunk. 
for ts := int64(241); ts <= 480; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{0, ts, val, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{0, ts, 0, hist, nil}) _, err = app.AppendHistogram(0, lblsHist, ts, hist, nil) require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{0, ts, 0, nil, floatHist}) _, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist) require.NoError(t, err) diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 57a53c46fe..9ff5124074 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -318,24 +318,24 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), 
storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}, sample{0, 5, 1, nil, nil}, sample{0, 6, 7, nil, nil}, sample{0, 7, 2, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}}, []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}}, []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}}, []chunks.Sample{sample{0, 5, 1, nil, nil}, sample{0, 6, 7, nil, nil}, sample{0, 7, 2, nil, nil}}, ), }), }, @@ -345,18 +345,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - 
[]chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}}, []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}}, []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), }, @@ -369,20 +369,20 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, 
sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), }, @@ -395,18 +395,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, 
nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), }, @@ -454,24 +454,24 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}, sample{0, 5, 1, nil, nil}, sample{0, 6, 7, nil, nil}, sample{0, 7, 2, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, 
nil, nil}}, + []chunks.Sample{sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}, sample{0, 5, 1, nil, nil}, sample{0, 6, 7, nil, nil}, sample{0, 7, 2, nil, nil}}, ), }), }, @@ -481,18 +481,18 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 
2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 2, 3, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), }, @@ -537,18 +537,18 @@ func TestBlockQuerier_TrimmingDoesNotModifyOriginalTombstoneIntervals(t *testing ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 3, 4, nil, nil}, sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 3, 3, nil, nil}, sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 3, 4, nil, nil}}, []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{0, 3, 3, nil, nil}}, []chunks.Sample{sample{0, 5, 3, nil, nil}, sample{0, 6, 6, nil, nil}}, ), }), } @@ -636,24 +636,24 @@ func TestBlockQuerierDelete(t *testing.T) { ms: 
[]*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}, sample{0, 5, 1, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}, sample{0, 7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil}}, + []chunks.Sample{sample{0, 1, 3, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 6, nil, nil}}, []chunks.Sample{sample{0, 5, 1, nil, nil}}, ), }), }, @@ -663,18 +663,18 @@ func TestBlockQuerierDelete(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, 
sample{0, 6, 3, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 2, nil, nil}, sample{0, 6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{0, 5, 3, nil, nil}}, ), }), }, @@ -875,15 +875,15 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "one chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -891,19 +891,19 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two full chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, 
nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}}, @@ -911,23 +911,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "three full chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, - {sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, + {sample{0, 10, 22, nil, nil}, sample{0, 203, 3493, nil, nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, sample{0, 10, 22, nil, nil}, sample{0, 203, 3493, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 
3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, + sample{0, 10, 22, nil, nil}, sample{0, 203, 3493, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}, {10, 203}}, @@ -943,8 +943,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek beyond chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, seek: 10, @@ -953,27 +953,27 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek on middle of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, seek: 2, seekSuccess: true, expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }, }, { name: "two chunks and seek before first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, 
nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, seek: -32, seekSuccess: true, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }, }, // Deletion / Trim cases. @@ -985,20 +985,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, + sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, + sample{0, 7, 89, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}, {7, 7}}, @@ -1006,20 +1006,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed middle sample of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 
6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}}, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 6, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}}, @@ -1027,20 +1027,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with deletion across two chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}}, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{9, 8, nil, nil}, + sample{0, 9, 8, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}, {9, 9}}, @@ -1048,17 +1048,17 @@ func 
TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with first chunk deleted", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 6}}, expected: []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 9}}, @@ -1067,22 +1067,22 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), seek: 3, seekSuccess: true, expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, + sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 89, nil, nil}, }, }, { name: "one chunk where all samples are trimmed", samples: [][]chunks.Sample{ - {sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{0, 2, 3, nil, nil}, 
sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, + {sample{0, 7, 89, nil, nil}, sample{0, 9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 3}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}), @@ -1093,24 +1093,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one histogram chunk", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 1, 0, 
tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1119,21 +1119,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, 
@@ -1142,23 +1142,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, + sample{0, 3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1167,24 +1167,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk", samples: [][]chunks.Sample{ { - 
sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, }, }, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, + sample{0, 3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, + sample{0, 3, 0, nil, 
tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1193,21 +1193,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1216,23 
+1216,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, + sample{0, 3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, + sample{0, 3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1241,24 
+1241,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1267,21 +1267,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, 
tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1290,23 +1290,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, 
tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{0, 1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, + sample{0, 2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, + sample{0, 3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1315,24 +1315,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }, }, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, 
nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1341,21 +1341,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1364,23 +1364,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{0, 1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, + sample{0, 2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, + sample{0, 3, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(3)}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1388,31 +1388,31 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "three full mixed chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}}, { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }, { - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 5, nil, nil}, sample{0, 6, 1, nil, nil}, sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{0, 10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{0, 1, 2, nil, nil}, sample{0, 2, 3, nil, nil}, sample{0, 3, 
5, nil, nil}, sample{0, 6, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}, {10, 203}}, @@ -1421,30 +1421,30 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks in different order", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }, - {sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}}, + {sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}}, { - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 
0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}, sample{0, 100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, + sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 9}, {11, 16}, {100, 203}}, @@ -1453,29 +1453,29 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks in different order intersect with deletion interval", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }, - {sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}}, + 
{sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}}, { - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, }, intervals: tombstones.Intervals{{Mint: 8, Maxt: 11}, {Mint: 15, Maxt: 150}}, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, + sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 7}, {12, 13}, {203, 203}}, @@ -1484,30 +1484,30 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks overlapping", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }, - {sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, 
nil}, sample{16, 1, nil, nil}}, + {sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}}, { - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{0, 12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}, sample{0, 10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{0, 12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, + sample{0, 11, 2, nil, nil}, sample{0, 12, 3, nil, nil}, sample{0, 13, 5, nil, nil}, sample{0, 16, 1, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{0, 10, 
0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, + sample{0, 203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 12}, {11, 16}, {10, 203}}, @@ -1516,56 +1516,56 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "int histogram iterables with counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.GenerateTestHistogram(9), nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 12, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 15, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 16, 0, tsdbutil.GenerateTestHistogram(7), nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 17, 0, tsdbutil.GenerateTestHistogram(5), nil}, }, { - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 19, 0, tsdbutil.GenerateTestHistogram(7), nil}, // Counter reset should be detected when chunks are created from the iterable. 
- sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 20, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 21, 0, tsdbutil.GenerateTestHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{0, 12, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 15, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 16, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 17, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 19, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 20, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{0, 21, 0, tsdbutil.GenerateTestHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(9)), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(9)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, - sample{15, 0, 
tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, - sample{16, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{0, 12, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, + sample{0, 15, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 16, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{17, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, + sample{0, 17, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 19, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{20, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, - sample{21, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{0, 20, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, + sample{0, 21, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1585,56 +1585,56 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "float histogram iterables with counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, + sample{0, 7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, + sample{0, 8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, // Counter reset should be detected when chunks are 
created from the iterable. - sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{0, 12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, // Counter reset should be detected when chunks are created from the iterable. - sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, }, { - sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{0, 18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, // Counter reset should be detected when chunks are created from the iterable. - sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, }, }, expected: []chunks.Sample{ - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, - sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, - sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, - sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, + sample{0, 8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, + sample{0, 
12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{0, 17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{0, 20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{0, 21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(9))}, + sample{0, 7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, + sample{0, 8, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(9))}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, - sample{15, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, - sample{16, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, + sample{0, 12, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, + sample{0, 15, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 16, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{17, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, + sample{0, 17, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, 
tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, + sample{0, 18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{0, 19, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{20, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, - sample{21, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{0, 20, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, + sample{0, 21, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1654,61 +1654,61 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "iterables with mixed encodings and counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{0, 9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, + sample{0, 11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, + sample{0, 12, 13, nil, nil}, + sample{0, 13, 14, nil, nil}, + sample{0, 14, 0, tsdbutil.GenerateTestHistogram(8), nil}, // Counter reset should be detected when chunks are created from the iterable. 
- sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 15, 0, tsdbutil.GenerateTestHistogram(7), nil}, }, { - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 45, nil, nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 19, 45, nil, nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 45, nil, nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{0, 9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, + sample{0, 11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, + sample{0, 12, 13, nil, nil}, + sample{0, 13, 14, nil, nil}, + sample{0, 14, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 15, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 19, 45, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 8, 0, tsdbutil.GenerateTestHistogram(9), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, nil, 
tsdbutil.GenerateTestFloatHistogram(12)}, + sample{0, 9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, + sample{0, 10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, + sample{0, 11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, + sample{0, 12, 13, nil, nil}, + sample{0, 13, 14, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{0, 14, 0, tsdbutil.GenerateTestHistogram(8), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{15, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{0, 15, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{0, 18, 0, tsdbutil.GenerateTestHistogram(6), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{19, 45, nil, nil}, + sample{0, 19, 45, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1849,8 +1849,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ {}, - {sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, - {sample{4, 4, nil, nil}, sample{5, 5, nil, nil}}, + {sample{0, 1, 1, nil, nil}, sample{0, 2, 2, nil, nil}, sample{0, 3, 3, nil, nil}}, + {sample{0, 4, 4, nil, nil}, sample{0, 5, 5, nil, nil}}, }, }, { @@ -1858,8 +1858,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}}, - {sample{4, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(5), nil}}, + {sample{0, 1, 0, 
tsdbutil.GenerateTestHistogram(1), nil}, sample{0, 2, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{0, 3, 0, tsdbutil.GenerateTestHistogram(3), nil}}, + {sample{0, 4, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{0, 5, 0, tsdbutil.GenerateTestHistogram(5), nil}}, }, }, { @@ -1867,8 +1867,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}}, - {sample{4, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}}, + {sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, sample{0, 2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{0, 3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}}, + {sample{0, 4, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{0, 5, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}}, }, }, } @@ -1902,7 +1902,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ {}, - {sample{1, 2, nil, nil}, sample{3, 4, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, + {sample{0, 1, 2, nil, nil}, sample{0, 3, 4, nil, nil}, sample{0, 5, 6, nil, nil}, sample{0, 7, 8, nil, nil}}, {}, }, }, @@ -1911,7 +1911,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{0, 1, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{0, 3, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{0, 5, 0, tsdbutil.GenerateTestHistogram(6), nil}, 
sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, {}, }, }, @@ -1920,7 +1920,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{0, 3, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{0, 5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{0, 7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, {}, }, }, @@ -1952,21 +1952,21 @@ func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) { name: "float", valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ - {sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{6, 8, nil, nil}}, + {sample{0, 1, 6, nil, nil}, sample{0, 5, 6, nil, nil}, sample{0, 6, 8, nil, nil}}, }, }, { name: "histogram", valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{6, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{0, 1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{0, 5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{0, 6, 0, tsdbutil.GenerateTestHistogram(8), nil}}, }, }, { name: "float histogram", valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{0, 5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{0, 6, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, }, }, } @@ -1995,21 +1995,21 @@ func 
TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) { name: "float", valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ - {sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, + {sample{0, 1, 6, nil, nil}, sample{0, 5, 6, nil, nil}, sample{0, 7, 8, nil, nil}}, }, }, { name: "histogram", valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{0, 1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{0, 5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{0, 7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, }, }, { name: "float histogram", valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{0, 1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{0, 5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{0, 7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, }, }, } From 6647e512ad7c161ab8f0721773752185adbe1ff5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 10:01:55 +0100 Subject: [PATCH 016/165] update ExpandSamples MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- storage/series.go | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/storage/series.go b/storage/series.go index c16e628ba2..ebc5a16c07 100644 --- a/storage/series.go +++ b/storage/series.go @@ -447,16 +447,16 @@ func (e errChunksIterator) Err() error { return e.err } // ExpandSamples iterates over all samples in the iterator, buffering all in slice. 
// Optionally it takes samples constructor, useful when you want to compare sample slices with different // sample implementations. if nil, sample type from this package will be used. -func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample) ([]chunks.Sample, error) { +func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(st, t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample) ([]chunks.Sample, error) { if newSampleFn == nil { - newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { + newSampleFn = func(st, t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { switch { case h != nil: - return hSample{0, t, h} + return hSample{st, t, h} case fh != nil: - return fhSample{0, t, fh} + return fhSample{st, t, fh} default: - return fSample{0, t, f} + return fSample{st, t, f} } } } @@ -468,17 +468,20 @@ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, return result, iter.Err() case chunkenc.ValFloat: t, f := iter.At() + st := iter.AtST() // NaNs can't be compared normally, so substitute for another value. 
if math.IsNaN(f) { f = -42 } - result = append(result, newSampleFn(t, f, nil, nil)) + result = append(result, newSampleFn(st, t, f, nil, nil)) case chunkenc.ValHistogram: t, h := iter.AtHistogram(nil) - result = append(result, newSampleFn(t, 0, h, nil)) + st := iter.AtST() + result = append(result, newSampleFn(st, t, 0, h, nil)) case chunkenc.ValFloatHistogram: t, fh := iter.AtFloatHistogram(nil) - result = append(result, newSampleFn(t, 0, nil, fh)) + st := iter.AtST() + result = append(result, newSampleFn(st, t, 0, nil, fh)) } } } From a5ac0bff1d1fe8a167fc9b982f692b39190090a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 10:02:13 +0100 Subject: [PATCH 017/165] update ooo_head.go but only with TODOs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- tsdb/ooo_head.go | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tsdb/ooo_head.go b/tsdb/ooo_head.go index bbb0f10e77..f9746c4c61 100644 --- a/tsdb/ooo_head.go +++ b/tsdb/ooo_head.go @@ -40,7 +40,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog // try to append at the end first if the new timestamp is higher than the // last known timestamp. if len(o.samples) == 0 || t > o.samples[len(o.samples)-1].t { - o.samples = append(o.samples, sample{t, v, h, fh}) + // TODO(krajorama): pass ST. + o.samples = append(o.samples, sample{0, t, v, h, fh}) return true } @@ -49,7 +50,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog if i >= len(o.samples) { // none found. append it at the end - o.samples = append(o.samples, sample{t, v, h, fh}) + // TODO(krajorama): pass ST. + o.samples = append(o.samples, sample{0, t, v, h, fh}) return true } @@ -61,7 +63,8 @@ func (o *OOOChunk) Insert(t int64, v float64, h *histogram.Histogram, fh *histog // Expand length by 1 to make room. 
use a zero sample, we will overwrite it anyway. o.samples = append(o.samples, sample{}) copy(o.samples[i+1:], o.samples[i:]) - o.samples[i] = sample{t, v, h, fh} + // TODO(krajorama): pass ST. + o.samples[i] = sample{0, t, v, h, fh} return true } From adf734db7a8a314b95ed8b88b960400b7e2f11d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 10:02:26 +0100 Subject: [PATCH 018/165] update remaining tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- tsdb/head_test.go | 6 +++--- tsdb/querier_test.go | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tsdb/head_test.go b/tsdb/head_test.go index d0928d64bf..bcf9b52f34 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -745,7 +745,7 @@ func TestHead_ReadWAL(t *testing.T) { // Verify samples and exemplar for series 10. c, _, _, err := s10.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{100, 2, nil, nil}, {101, 5, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{0, 100, 2, nil, nil}, {0, 101, 5, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) q, err := head.ExemplarQuerier(context.Background()) require.NoError(t, err) @@ -758,14 +758,14 @@ func TestHead_ReadWAL(t *testing.T) { // Verify samples for series 50 c, _, _, err = s50.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{101, 6, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{0, 101, 6, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) // Verify records for series 100 and its duplicate, series 101. // The samples before the new series record should be discarded since a duplicate record // is only possible when old samples were compacted. 
c, _, _, err = s100.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{101, 7, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{0, 101, 7, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) q, err = head.ExemplarQuerier(context.Background()) require.NoError(t, err) diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 9ff5124074..4387635959 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -574,22 +574,22 @@ var testData = []seriesSamples{ { lset: map[string]string{"a": "a"}, chunks: [][]sample{ - {{1, 2, nil, nil}, {2, 3, nil, nil}, {3, 4, nil, nil}}, - {{5, 2, nil, nil}, {6, 3, nil, nil}, {7, 4, nil, nil}}, + {{0, 1, 2, nil, nil}, {0, 2, 3, nil, nil}, {0, 3, 4, nil, nil}}, + {{0, 5, 2, nil, nil}, {0, 6, 3, nil, nil}, {0, 7, 4, nil, nil}}, }, }, { lset: map[string]string{"a": "a", "b": "b"}, chunks: [][]sample{ - {{1, 1, nil, nil}, {2, 2, nil, nil}, {3, 3, nil, nil}}, - {{5, 3, nil, nil}, {6, 6, nil, nil}}, + {{0, 1, 1, nil, nil}, {0, 2, 2, nil, nil}, {0, 3, 3, nil, nil}}, + {{0, 5, 3, nil, nil}, {0, 6, 6, nil, nil}}, }, }, { lset: map[string]string{"b": "b"}, chunks: [][]sample{ - {{1, 3, nil, nil}, {2, 2, nil, nil}, {3, 6, nil, nil}}, - {{5, 1, nil, nil}, {6, 7, nil, nil}, {7, 2, nil, nil}}, + {{0, 1, 3, nil, nil}, {0, 2, 2, nil, nil}, {0, 3, 6, nil, nil}}, + {{0, 5, 1, nil, nil}, {0, 6, 7, nil, nil}, {0, 7, 2, nil, nil}}, }, }, } From 8067b3d60ac9231026513da5e520ddc54d804bec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 12:13:59 +0100 Subject: [PATCH 019/165] add test coverage for buffer.go MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I've checked that commenting out any one of the new lines produces an error. 
Signed-off-by: György Krajcsovits --- storage/buffer_test.go | 122 ++++++++++++++++++++++++++--------------- 1 file changed, 78 insertions(+), 44 deletions(-) diff --git a/storage/buffer_test.go b/storage/buffer_test.go index e700231756..61d1601bc0 100644 --- a/storage/buffer_test.go +++ b/storage/buffer_test.go @@ -61,7 +61,9 @@ func TestSampleRing(t *testing.T) { input := []fSample{} for _, t := range c.input { - input = append(input, fSample{t: t, f: float64(rand.Intn(100))}) + // Randomize start timestamp to make sure it does not affect the + // outcome. + input = append(input, fSample{st: rand.Int63(), t: t, f: float64(rand.Intn(100))}) } for i, s := range input { @@ -87,6 +89,24 @@ func TestSampleRing(t *testing.T) { } } +func TestSampleRingFloatST(t *testing.T) { + r := newSampleRing(10, 5, chunkenc.ValNone) + require.Empty(t, r.fBuf) + require.Empty(t, r.hBuf) + require.Empty(t, r.fhBuf) + require.Empty(t, r.iBuf) + + r.addF(fSample{st: 100, t: 11, f: 3.14}) + it := r.iterator() + + require.Equal(t, chunkenc.ValFloat, it.Next()) + ts, f := it.At() + require.Equal(t, int64(11), ts) + require.Equal(t, 3.14, f) + require.Equal(t, int64(100), it.AtST()) + require.Equal(t, chunkenc.ValNone, it.Next()) +} + func TestSampleRingMixed(t *testing.T) { h1 := tsdbutil.GenerateTestHistogram(1) h2 := tsdbutil.GenerateTestHistogram(2) @@ -99,39 +119,43 @@ func TestSampleRingMixed(t *testing.T) { require.Empty(t, r.iBuf) // But then mixed adds should work as expected. 
- r.addF(fSample{t: 1, f: 3.14}) - r.addH(hSample{t: 2, h: h1}) + r.addF(fSample{st: 10, t: 11, f: 3.14}) + r.addH(hSample{st: 20, t: 21, h: h1}) it := r.iterator() require.Equal(t, chunkenc.ValFloat, it.Next()) ts, f := it.At() - require.Equal(t, int64(1), ts) + require.Equal(t, int64(11), ts) require.Equal(t, 3.14, f) + require.Equal(t, int64(10), it.AtST()) require.Equal(t, chunkenc.ValHistogram, it.Next()) var h *histogram.Histogram ts, h = it.AtHistogram() - require.Equal(t, int64(2), ts) + require.Equal(t, int64(21), ts) require.Equal(t, h1, h) + require.Equal(t, int64(20), it.AtST()) require.Equal(t, chunkenc.ValNone, it.Next()) r.reset() it = r.iterator() require.Equal(t, chunkenc.ValNone, it.Next()) - r.addF(fSample{t: 3, f: 4.2}) - r.addH(hSample{t: 4, h: h2}) + r.addF(fSample{st: 30, t: 31, f: 4.2}) + r.addH(hSample{st: 40, t: 41, h: h2}) it = r.iterator() require.Equal(t, chunkenc.ValFloat, it.Next()) ts, f = it.At() - require.Equal(t, int64(3), ts) + require.Equal(t, int64(31), ts) require.Equal(t, 4.2, f) + require.Equal(t, int64(30), it.AtST()) require.Equal(t, chunkenc.ValHistogram, it.Next()) ts, h = it.AtHistogram() - require.Equal(t, int64(4), ts) + require.Equal(t, int64(41), ts) require.Equal(t, h2, h) + require.Equal(t, int64(40), it.AtST()) require.Equal(t, chunkenc.ValNone, it.Next()) } @@ -157,44 +181,50 @@ func TestSampleRingAtFloatHistogram(t *testing.T) { it := r.iterator() require.Equal(t, chunkenc.ValNone, it.Next()) - r.addFH(fhSample{t: 1, fh: fh1}) - r.addFH(fhSample{t: 2, fh: fh2}) + r.addFH(fhSample{st: 10, t: 11, fh: fh1}) + r.addFH(fhSample{st: 20, t: 21, fh: fh2}) it = r.iterator() require.Equal(t, chunkenc.ValFloatHistogram, it.Next()) ts, fh = it.AtFloatHistogram(fh) - require.Equal(t, int64(1), ts) + require.Equal(t, int64(11), ts) require.Equal(t, fh1, fh) + require.Equal(t, int64(10), it.AtST()) require.Equal(t, chunkenc.ValFloatHistogram, it.Next()) ts, fh = it.AtFloatHistogram(fh) - require.Equal(t, int64(2), ts) + 
require.Equal(t, int64(21), ts) require.Equal(t, fh2, fh) + require.Equal(t, int64(20), it.AtST()) require.Equal(t, chunkenc.ValNone, it.Next()) r.reset() it = r.iterator() require.Equal(t, chunkenc.ValNone, it.Next()) - r.addH(hSample{t: 3, h: h1}) - r.addH(hSample{t: 4, h: h2}) + r.addH(hSample{st: 30, t: 31, h: h1}) + r.addH(hSample{st: 40, t: 41, h: h2}) it = r.iterator() require.Equal(t, chunkenc.ValHistogram, it.Next()) ts, h = it.AtHistogram() - require.Equal(t, int64(3), ts) + require.Equal(t, int64(31), ts) require.Equal(t, h1, h) + require.Equal(t, int64(30), it.AtST()) ts, fh = it.AtFloatHistogram(fh) - require.Equal(t, int64(3), ts) + require.Equal(t, int64(31), ts) require.Equal(t, h1.ToFloat(nil), fh) + require.Equal(t, int64(30), it.AtST()) require.Equal(t, chunkenc.ValHistogram, it.Next()) ts, h = it.AtHistogram() - require.Equal(t, int64(4), ts) + require.Equal(t, int64(41), ts) require.Equal(t, h2, h) + require.Equal(t, int64(40), it.AtST()) ts, fh = it.AtFloatHistogram(fh) - require.Equal(t, int64(4), ts) + require.Equal(t, int64(41), ts) require.Equal(t, h2.ToFloat(nil), fh) + require.Equal(t, int64(40), it.AtST()) require.Equal(t, chunkenc.ValNone, it.Next()) } @@ -206,59 +236,63 @@ func TestBufferedSeriesIterator(t *testing.T) { bit := it.Buffer() for bit.Next() == chunkenc.ValFloat { t, f := bit.At() - b = append(b, fSample{t: t, f: f}) + st := bit.AtST() + b = append(b, fSample{st: st, t: t, f: f}) } require.Equal(t, exp, b, "buffer mismatch") } - sampleEq := func(ets int64, ev float64) { + sampleEq := func(est, ets int64, ev float64) { ts, v := it.At() + st := it.AtST() + require.Equal(t, est, st, "start timestamp mismatch") require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ev, v, "value mismatch") } - prevSampleEq := func(ets int64, ev float64, eok bool) { + prevSampleEq := func(est, ets int64, ev float64, eok bool) { s, ok := it.PeekBack(1) require.Equal(t, eok, ok, "exist mismatch") + require.Equal(t, est, s.ST(), "start 
timestamp mismatch") require.Equal(t, ets, s.T(), "timestamp mismatch") require.Equal(t, ev, s.F(), "value mismatch") } it = NewBufferIterator(NewListSeriesIterator(samples{ - fSample{t: 1, f: 2}, - fSample{t: 2, f: 3}, - fSample{t: 3, f: 4}, - fSample{t: 4, f: 5}, - fSample{t: 5, f: 6}, - fSample{t: 99, f: 8}, - fSample{t: 100, f: 9}, - fSample{t: 101, f: 10}, + fSample{st: -1, t: 1, f: 2}, + fSample{st: 1, t: 2, f: 3}, + fSample{st: 2, t: 3, f: 4}, + fSample{st: 3, t: 4, f: 5}, + fSample{st: 3, t: 5, f: 6}, + fSample{st: 50, t: 99, f: 8}, + fSample{st: 99, t: 100, f: 9}, + fSample{st: 100, t: 101, f: 10}, }), 2) require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed") - sampleEq(1, 2) - prevSampleEq(0, 0, false) + sampleEq(-1, 1, 2) + prevSampleEq(0, 0, 0, false) bufferEq(nil) require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") - sampleEq(2, 3) - prevSampleEq(1, 2, true) - bufferEq([]fSample{{t: 1, f: 2}}) + sampleEq(1, 2, 3) + prevSampleEq(-1, 1, 2, true) + bufferEq([]fSample{{st: -1, t: 1, f: 2}}) require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") - sampleEq(5, 6) - prevSampleEq(4, 5, true) - bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}}) + sampleEq(3, 5, 6) + prevSampleEq(3, 4, 5, true) + bufferEq([]fSample{{st: 1, t: 2, f: 3}, {st: 2, t: 3, f: 4}, {st: 3, t: 4, f: 5}}) require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed") - sampleEq(5, 6) - prevSampleEq(4, 5, true) - bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}}) + sampleEq(3, 5, 6) + prevSampleEq(3, 4, 5, true) + bufferEq([]fSample{{st: 1, t: 2, f: 3}, {st: 2, t: 3, f: 4}, {st: 3, t: 4, f: 5}}) require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed") - sampleEq(101, 10) - prevSampleEq(100, 9, true) - bufferEq([]fSample{{t: 99, f: 8}, {t: 100, f: 9}}) + sampleEq(100, 101, 10) + prevSampleEq(99, 100, 9, true) + 
bufferEq([]fSample{{st: 50, t: 99, f: 8}, {st: 99, t: 100, f: 9}}) require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly") require.Equal(t, chunkenc.ValNone, it.Seek(1024), "seek succeeded unexpectedly") From 5ecc0e706295fc795218c874b5f94ffe4fdceca4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 12:19:43 +0100 Subject: [PATCH 020/165] test that ChainSampleIterator passes on the AtST call MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- storage/merge_test.go | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/storage/merge_test.go b/storage/merge_test.go index 5ffb0c4851..e42a6a4ce1 100644 --- a/storage/merge_test.go +++ b/storage/merge_test.go @@ -387,13 +387,13 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) { func histogramSample(ts int64, hint histogram.CounterResetHint) hSample { h := tsdbutil.GenerateTestHistogram(ts + 1) h.CounterResetHint = hint - return hSample{t: ts, h: h} + return hSample{st: -ts, t: ts, h: h} } func floatHistogramSample(ts int64, hint histogram.CounterResetHint) fhSample { fh := tsdbutil.GenerateTestFloatHistogram(ts + 1) fh.CounterResetHint = hint - return fhSample{t: ts, fh: fh} + return fhSample{st: -ts, t: ts, fh: fh} } // Shorthands for counter reset hints. 
@@ -1059,7 +1059,7 @@ func (*mockChunkSeriesSet) Warnings() annotations.Annotations { return nil } func TestChainSampleIterator(t *testing.T) { for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{ - "float": func(ts int64) chunks.Sample { return fSample{0, ts, float64(ts)} }, + "float": func(ts int64) chunks.Sample { return fSample{-ts, ts, float64(ts)} }, "histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) }, "float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) }, } { @@ -1176,7 +1176,7 @@ func TestChainSampleIteratorHistogramCounterResetHint(t *testing.T) { func TestChainSampleIteratorSeek(t *testing.T) { for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{ - "float": func(ts int64) chunks.Sample { return fSample{0, ts, float64(ts)} }, + "float": func(ts int64) chunks.Sample { return fSample{-ts, ts, float64(ts)} }, "histogram": func(ts int64) chunks.Sample { return histogramSample(ts, uk) }, "float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) }, } { @@ -1224,13 +1224,13 @@ func TestChainSampleIteratorSeek(t *testing.T) { switch merged.Seek(tc.seek) { case chunkenc.ValFloat: t, f := merged.At() - actual = append(actual, fSample{0, t, f}) + actual = append(actual, fSample{merged.AtST(), t, f}) case chunkenc.ValHistogram: t, h := merged.AtHistogram(nil) - actual = append(actual, hSample{0, t, h}) + actual = append(actual, hSample{merged.AtST(), t, h}) case chunkenc.ValFloatHistogram: t, fh := merged.AtFloatHistogram(nil) - actual = append(actual, fhSample{0, t, fh}) + actual = append(actual, fhSample{merged.AtST(), t, fh}) } s, err := ExpandSamples(merged, nil) require.NoError(t, err) @@ -1310,13 +1310,13 @@ func TestChainSampleIteratorSeekHistogramCounterResetHint(t *testing.T) { switch merged.Seek(tc.seek) { case chunkenc.ValFloat: t, f := merged.At() - actual = append(actual, fSample{0, t, f}) + actual = append(actual, 
fSample{merged.AtST(), t, f}) case chunkenc.ValHistogram: t, h := merged.AtHistogram(nil) - actual = append(actual, hSample{0, t, h}) + actual = append(actual, hSample{merged.AtST(), t, h}) case chunkenc.ValFloatHistogram: t, fh := merged.AtFloatHistogram(nil) - actual = append(actual, fhSample{0, t, fh}) + actual = append(actual, fhSample{merged.AtST(), t, fh}) } s, err := ExpandSamples(merged, nil) require.NoError(t, err) From 6137de085e9615b1ef3f976de405550d493c9249 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?= Date: Wed, 14 Jan 2026 12:30:24 +0100 Subject: [PATCH 021/165] test ListSeriesIterator with ST MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: György Krajcsovits --- storage/series_test.go | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/storage/series_test.go b/storage/series_test.go index 3ad84be6b0..b33d6cb1b3 100644 --- a/storage/series_test.go +++ b/storage/series_test.go @@ -28,11 +28,11 @@ import ( func TestListSeriesIterator(t *testing.T) { it := NewListSeriesIterator(samples{ - fSample{0, 0, 0}, - fSample{0, 1, 1}, - fSample{0, 1, 1.5}, - fSample{0, 2, 2}, - fSample{0, 3, 3}, + fSample{-10, 0, 0}, + fSample{-9, 1, 1}, + fSample{-8, 1, 1.5}, + fSample{-7, 2, 2}, + fSample{-6, 3, 3}, }) // Seek to the first sample with ts=1. @@ -40,30 +40,35 @@ func TestListSeriesIterator(t *testing.T) { ts, v := it.At() require.Equal(t, int64(1), ts) require.Equal(t, 1., v) + require.Equal(t, int64(-9), it.AtST()) // Seek one further, next sample still has ts=1. require.Equal(t, chunkenc.ValFloat, it.Next()) ts, v = it.At() require.Equal(t, int64(1), ts) require.Equal(t, 1.5, v) + require.Equal(t, int64(-8), it.AtST()) // Seek again to 1 and make sure we stay where we are. require.Equal(t, chunkenc.ValFloat, it.Seek(1)) ts, v = it.At() require.Equal(t, int64(1), ts) require.Equal(t, 1.5, v) + require.Equal(t, int64(-8), it.AtST()) // Another seek. 
require.Equal(t, chunkenc.ValFloat, it.Seek(3)) ts, v = it.At() require.Equal(t, int64(3), ts) require.Equal(t, 3., v) + require.Equal(t, int64(-6), it.AtST()) // And we don't go back. require.Equal(t, chunkenc.ValFloat, it.Seek(2)) ts, v = it.At() require.Equal(t, int64(3), ts) require.Equal(t, 3., v) + require.Equal(t, int64(-6), it.AtST()) // Seek beyond the end. require.Equal(t, chunkenc.ValNone, it.Seek(5)) From 3374d2e56fc9ce12d19259633f8dd3bbe9015e81 Mon Sep 17 00:00:00 2001 From: Bartlomiej Plotka Date: Wed, 14 Jan 2026 13:48:33 +0000 Subject: [PATCH 022/165] feat(teststorage)[PART4a]: Add AppendableV2 support for mock Appendable (#17834) * feat(teststorage)[PART4a]: Add AppendableV2 support for mock Appendable Signed-off-by: bwplotka * fix: adjusted AppenderV1 flow for reliability Found in https://github.com/prometheus/prometheus/pull/17838 and by Krajo comment Signed-off-by: bwplotka * addressed comments Signed-off-by: bwplotka * fix broken appV2 commit and rollback; added tests Signed-off-by: bwplotka --------- Signed-off-by: bwplotka --- util/teststorage/appender.go | 193 +++++++++++++++++++++------- util/teststorage/appender_test.go | 202 +++++++++++++++++++++++++++--- 2 files changed, 334 insertions(+), 61 deletions(-) diff --git a/util/teststorage/appender.go b/util/teststorage/appender.go index 058a09561c..a98ff9c48f 100644 --- a/util/teststorage/appender.go +++ b/util/teststorage/appender.go @@ -65,13 +65,17 @@ func (s Sample) String() string { // Print all value types on purpose, to catch bugs for appending multiple sample types at once. 
h := "" if s.H != nil { - h = s.H.String() + h = " " + s.H.String() } fh := "" if s.FH != nil { - fh = s.FH.String() + fh = " " + s.FH.String() } - b.WriteString(fmt.Sprintf("%s %v%v%v st@%v t@%v\n", s.L.String(), s.V, h, fh, s.ST, s.T)) + b.WriteString(fmt.Sprintf("%s %v%v%v st@%v t@%v", s.L.String(), s.V, h, fh, s.ST, s.T)) + if len(s.ES) > 0 { + b.WriteString(fmt.Sprintf(" %v", s.ES)) + } + b.WriteString("\n") return b.String() } @@ -104,7 +108,8 @@ type Appendable struct { rolledbackSamples []Sample // Optional chain (Appender will collect samples, then run next). - next storage.Appendable + next storage.Appendable + nextV2 storage.AppendableV2 } // NewAppendable returns mock Appendable. @@ -112,12 +117,18 @@ func NewAppendable() *Appendable { return &Appendable{} } -// Then chains another appender from the provided appendable for the Appender calls. +// Then chains another appender from the provided Appendable for the Appender calls. func (a *Appendable) Then(appendable storage.Appendable) *Appendable { a.next = appendable return a } +// ThenV2 chains another appenderV2 from the provided AppendableV2 for the AppenderV2 calls. +func (a *Appendable) ThenV2(appendable storage.AppendableV2) *Appendable { + a.nextV2 = appendable + return a +} + // WithErrs allows injecting errors to the appender. 
func (a *Appendable) WithErrs(appendErrFn func(ls labels.Labels) error, appendExemplarsError, commitErr error) *Appendable { a.appendErrFn = appendErrFn @@ -130,6 +141,9 @@ func (a *Appendable) WithErrs(appendErrFn func(ls labels.Labels) error, appendEx func (a *Appendable) PendingSamples() []Sample { a.mtx.Lock() defer a.mtx.Unlock() + if len(a.pendingSamples) == 0 { + return nil + } ret := make([]Sample, len(a.pendingSamples)) copy(ret, a.pendingSamples) @@ -140,6 +154,9 @@ func (a *Appendable) PendingSamples() []Sample { func (a *Appendable) ResultSamples() []Sample { a.mtx.Lock() defer a.mtx.Unlock() + if len(a.resultSamples) == 0 { + return nil + } ret := make([]Sample, len(a.resultSamples)) copy(ret, a.resultSamples) @@ -150,6 +167,9 @@ func (a *Appendable) ResultSamples() []Sample { func (a *Appendable) RolledbackSamples() []Sample { a.mtx.Lock() defer a.mtx.Unlock() + if len(a.rolledbackSamples) == 0 { + return nil + } ret := make([]Sample, len(a.rolledbackSamples)) copy(ret, a.rolledbackSamples) @@ -205,28 +225,77 @@ func (a *Appendable) String() string { var errClosedAppender = errors.New("appender was already committed/rolledback") -type appender struct { - err error - next storage.Appender +type baseAppender struct { + err error - a *Appendable + nextTr storage.AppenderTransaction + a *Appendable } -func (a *appender) checkErr() error { +func (a *baseAppender) checkErr() error { a.a.mtx.Lock() defer a.a.mtx.Unlock() return a.err } +func (a *baseAppender) Commit() error { + if err := a.checkErr(); err != nil { + return err + } + defer a.a.openAppenders.Dec() + + if a.a.commitErr != nil { + return a.a.commitErr + } + + a.a.mtx.Lock() + a.a.resultSamples = append(a.a.resultSamples, a.a.pendingSamples...) 
+ a.a.pendingSamples = a.a.pendingSamples[:0] + a.err = errClosedAppender + a.a.mtx.Unlock() + + if a.nextTr != nil { + return a.nextTr.Commit() + } + return nil +} + +func (a *baseAppender) Rollback() error { + if err := a.checkErr(); err != nil { + return err + } + defer a.a.openAppenders.Dec() + + a.a.mtx.Lock() + a.a.rolledbackSamples = append(a.a.rolledbackSamples, a.a.pendingSamples...) + a.a.pendingSamples = a.a.pendingSamples[:0] + a.err = errClosedAppender + a.a.mtx.Unlock() + + if a.nextTr != nil { + return a.nextTr.Rollback() + } + return nil +} + +type appender struct { + baseAppender + + next storage.Appender +} + func (a *Appendable) Appender(ctx context.Context) storage.Appender { - ret := &appender{a: a} + ret := &appender{baseAppender: baseAppender{a: a}} if a.openAppenders.Inc() > 1 { ret.err = errors.New("teststorage.Appendable.Appender() concurrent use is not supported; attempted opening new Appender() without Commit/Rollback of the previous one. Extend the implementation if concurrent mock is needed") } if a.next != nil { - ret.next = a.next.Appender(ctx) + app := a.next.Appender(ctx) + ret.next, ret.nextTr = app, app + } else if a.nextV2 != nil { + ret.err = errors.Join(ret.err, errors.New("teststorage.Appendable.Appender() invoked with .ThenV2 but no .Then was supplied; likely bug")) } return ret } @@ -264,7 +333,7 @@ func computeOrCheckRef(ref storage.SeriesRef, ls labels.Labels) (storage.SeriesR if storage.SeriesRef(h) != ref { // Check for buggy ref while we at it. 
- return 0, errors.New("teststorage.appender: found input ref not matching labels; potential bug in Appendable user") + return 0, errors.New("teststorage.appender: found input ref not matching labels; potential bug in Appendable usage") } return ref, nil } @@ -297,6 +366,7 @@ func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exem if a.a.appendExemplarsError != nil { return 0, a.a.appendExemplarsError } + var appended bool a.a.mtx.Lock() // NOTE(bwplotka): Eventually exemplar has to be attached to a series and soon @@ -306,11 +376,12 @@ func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exem for ; i >= 0; i-- { // Attach exemplars to the last matching sample. if ref == storage.SeriesRef(a.a.pendingSamples[i].L.Hash()) { a.a.pendingSamples[i].ES = append(a.a.pendingSamples[i].ES, e) + appended = true break } } a.a.mtx.Unlock() - if i < 0 { + if !appended { return 0, fmt.Errorf("teststorage.appender: exemplar appender without series; ref %v; l %v; exemplar: %v", ref, l, e) } @@ -336,6 +407,8 @@ func (a *appender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m meta return 0, err } + var updated bool + a.a.mtx.Lock() // NOTE(bwplotka): Eventually metadata has to be attached to a series and soon // the AppenderV2 will guarantee that for TSDB. Assume this from the mock perspective @@ -344,11 +417,12 @@ func (a *appender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m meta for ; i >= 0; i-- { // Attach metadata to the last matching sample. 
if ref == storage.SeriesRef(a.a.pendingSamples[i].L.Hash()) { a.a.pendingSamples[i].M = m + updated = true break } } a.a.mtx.Unlock() - if i < 0 { + if !updated { return 0, fmt.Errorf("teststorage.appender: metadata update without series; ref %v; l %v; m: %v", ref, l, m) } @@ -358,42 +432,75 @@ func (a *appender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m meta return computeOrCheckRef(ref, l) } -func (a *appender) Commit() error { - if err := a.checkErr(); err != nil { - return err - } - defer a.a.openAppenders.Dec() +type appenderV2 struct { + baseAppender - if a.a.commitErr != nil { - return a.a.commitErr - } - - a.a.mtx.Lock() - a.a.resultSamples = append(a.a.resultSamples, a.a.pendingSamples...) - a.a.pendingSamples = a.a.pendingSamples[:0] - a.err = errClosedAppender - a.a.mtx.Unlock() - - if a.a.next != nil { - return a.next.Commit() - } - return nil + next storage.AppenderV2 } -func (a *appender) Rollback() error { - if err := a.checkErr(); err != nil { - return err +func (a *Appendable) AppenderV2(ctx context.Context) storage.AppenderV2 { + ret := &appenderV2{baseAppender: baseAppender{a: a}} + if a.openAppenders.Inc() > 1 { + ret.err = errors.New("teststorage.Appendable.AppenderV2() concurrent use is not supported; attempted opening new AppenderV2() without Commit/Rollback of the previous one. 
Extend the implementation if concurrent mock is needed") + } + + if a.nextV2 != nil { + app := a.nextV2.AppenderV2(ctx) + ret.next, ret.nextTr = app, app + } else if a.next != nil { + ret.err = errors.Join(ret.err, errors.New("teststorage.Appendable.AppenderV2() invoked with .Then but no .ThenV2 was supplied; likely bug")) + } + return ret +} + +func (a *appenderV2) Append(ref storage.SeriesRef, ls labels.Labels, st, t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, opts storage.AOptions) (_ storage.SeriesRef, err error) { + if err := a.checkErr(); err != nil { + return 0, err + } + + if a.a.appendErrFn != nil { + if err := a.a.appendErrFn(ls); err != nil { + return 0, err + } } - defer a.a.openAppenders.Dec() a.a.mtx.Lock() - a.a.rolledbackSamples = append(a.a.rolledbackSamples, a.a.pendingSamples...) - a.a.pendingSamples = a.a.pendingSamples[:0] - a.err = errClosedAppender + var es []exemplar.Exemplar + if len(opts.Exemplars) > 0 { + // As per AppenderV2 interface, opts.Exemplar slice is unsafe for reuse. 
+ es = make([]exemplar.Exemplar, len(opts.Exemplars)) + copy(es, opts.Exemplars) + } + a.a.pendingSamples = append(a.a.pendingSamples, Sample{ + MF: opts.MetricFamilyName, + M: opts.Metadata, + L: ls, + ST: st, T: t, + V: v, H: h, FH: fh, + ES: es, + }) a.a.mtx.Unlock() + var partialErr error + if a.a.appendExemplarsError != nil { + var exErrs []error + for range opts.Exemplars { + exErrs = append(exErrs, a.a.appendExemplarsError) + } + if len(exErrs) > 0 { + partialErr = &storage.AppendPartialError{ExemplarErrors: exErrs} + } + } + if a.next != nil { - return a.next.Rollback() + ref, err = a.next.Append(ref, ls, st, t, v, h, fh, opts) + if err != nil { + return ref, err + } } - return nil + ref, err = computeOrCheckRef(ref, ls) + if err != nil { + return ref, err + } + return ref, partialErr } diff --git a/util/teststorage/appender_test.go b/util/teststorage/appender_test.go index 8c2a825c3a..5b0e03483b 100644 --- a/util/teststorage/appender_test.go +++ b/util/teststorage/appender_test.go @@ -19,62 +19,191 @@ import ( "testing" "github.com/google/go-cmp/cmp" + "github.com/prometheus/common/model" "github.com/stretchr/testify/require" "github.com/prometheus/prometheus/model/exemplar" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/metadata" + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/tsdbutil" "github.com/prometheus/prometheus/util/testutil" ) +func testAppendableV1(t *testing.T, appTest *Appendable, a storage.Appendable) { + for _, commit := range []bool{true, false} { + appTest.ResultReset() + + app := a.Appender(t.Context()) + + ref1, err := app.Append(0, labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v1"), 1, 2) + require.NoError(t, err) + + h := tsdbutil.GenerateTestHistogram(0) + _, err = app.AppendHistogram(0, labels.FromStrings(model.MetricNameLabel, "test_metric2", "app", "v1"), 2, h, nil) + require.NoError(t, err) + + fh := 
tsdbutil.GenerateTestFloatHistogram(0) + _, err = app.AppendHistogram(0, labels.FromStrings(model.MetricNameLabel, "test_metric3", "app", "v1"), 3, nil, fh) + require.NoError(t, err) + + // Update meta of first series. + m1 := metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"} + _, err = app.UpdateMetadata(ref1, labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v1"), m1) + require.NoError(t, err) + + // Add exemplars to the first series. + e1 := exemplar.Exemplar{Labels: labels.FromStrings(model.MetricNameLabel, "yolo"), HasTs: true, Ts: 1} + _, err = app.AppendExemplar(ref1, labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v1"), e1) + require.NoError(t, err) + + exp := []Sample{ + {L: labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v1"), M: m1, T: 1, V: 2, ES: []exemplar.Exemplar{e1}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "app", "v1"), T: 2, H: h}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric3", "app", "v1"), T: 3, FH: fh}, + } + testutil.RequireEqual(t, exp, appTest.PendingSamples()) + require.Nil(t, appTest.ResultSamples()) + require.Nil(t, appTest.RolledbackSamples()) + + if commit { + require.NoError(t, app.Commit()) + require.Nil(t, appTest.PendingSamples()) + testutil.RequireEqual(t, exp, appTest.ResultSamples()) + require.Nil(t, appTest.RolledbackSamples()) + break + } + + require.NoError(t, app.Rollback()) + require.Nil(t, appTest.PendingSamples()) + require.Nil(t, appTest.ResultSamples()) + testutil.RequireEqual(t, exp, appTest.RolledbackSamples()) + } +} + +func testAppendableV2(t *testing.T, appTest *Appendable, a storage.AppendableV2) { + for _, commit := range []bool{true, false} { + appTest.ResultReset() + + app := a.AppenderV2(t.Context()) + + m1 := metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"} + e1 := exemplar.Exemplar{Labels: labels.FromStrings(model.MetricNameLabel, "yolo"), HasTs: true, Ts: 1} + _, err := 
app.Append(0, labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v2"), -1, 1, 2, nil, nil, storage.AOptions{ + MetricFamilyName: "test_metric1", + Metadata: m1, + Exemplars: []exemplar.Exemplar{e1}, + }) + require.NoError(t, err) + + h := tsdbutil.GenerateTestHistogram(0) + _, err = app.Append(0, labels.FromStrings(model.MetricNameLabel, "test_metric2", "app", "v2"), -2, 2, 0, h, nil, storage.AOptions{}) + require.NoError(t, err) + + fh := tsdbutil.GenerateTestFloatHistogram(0) + _, err = app.Append(0, labels.FromStrings(model.MetricNameLabel, "test_metric3", "app", "v2"), -3, 3, 0, nil, fh, storage.AOptions{}) + require.NoError(t, err) + + exp := []Sample{ + {L: labels.FromStrings(model.MetricNameLabel, "test_metric1", "app", "v2"), MF: "test_metric1", M: m1, ST: -1, T: 1, V: 2, ES: []exemplar.Exemplar{e1}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "app", "v2"), ST: -2, T: 2, H: h}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric3", "app", "v2"), ST: -3, T: 3, FH: fh}, + } + testutil.RequireEqual(t, exp, appTest.PendingSamples()) + require.Nil(t, appTest.ResultSamples()) + require.Nil(t, appTest.RolledbackSamples()) + + if commit { + require.NoError(t, app.Commit()) + require.Nil(t, appTest.PendingSamples()) + testutil.RequireEqual(t, exp, appTest.ResultSamples()) + require.Nil(t, appTest.RolledbackSamples()) + break + } + + require.NoError(t, app.Rollback()) + require.Nil(t, appTest.PendingSamples()) + require.Nil(t, appTest.ResultSamples()) + testutil.RequireEqual(t, exp, appTest.RolledbackSamples()) + } +} + +func TestAppendable(t *testing.T) { + appTest := NewAppendable() + testAppendableV1(t, appTest, appTest) + testAppendableV2(t, appTest, appTest) +} + +func TestAppendable_Then(t *testing.T) { + nextAppTest := NewAppendable() + app := NewAppendable().Then(nextAppTest) + + // Ensure next mock records all the appends when appending to app.
+ testAppendableV1(t, nextAppTest, app) + + // V2 should fail as Then was supplied with Appendable V1. + require.Error(t, app.AppenderV2(t.Context()).Commit()) +} + +func TestAppendable_ThenV2(t *testing.T) { + nextAppTest := NewAppendable() + app := NewAppendable().ThenV2(nextAppTest) + + // Ensure next mock records all the appends when appending to app. + testAppendableV2(t, nextAppTest, app) + + // V1 should fail as ThenV2 was supplied with Appendable V2. + require.Error(t, app.Appender(t.Context()).Commit()) +} + // TestSample_RequireEqual ensures standard testutil.RequireEqual is enough for comparisons. // This is thanks to the fact metadata has now Equals method. func TestSample_RequireEqual(t *testing.T) { a := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, - {L: labels.FromStrings("__name__", "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, - {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo")}}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo")}}}, } testutil.RequireEqual(t, a, a) b1 := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, - {L: labels.FromStrings("__name__", "test_metric2_diff", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, // test_metric2_diff is different.
- {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo")}}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2_diff", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, // test_metric2_diff is different. + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo")}}}, } requireNotEqual(t, a, b1) b2 := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, - {L: labels.FromStrings("__name__", "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, - {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo2")}}}, // exemplar is different. + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo2")}}}, // exemplar is different. } requireNotEqual(t, a, b2) b3 := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, - {L: labels.FromStrings("__name__", "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123, T: 123}, // Timestamp is different. 
- {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo")}}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123, T: 123}, // Timestamp is different. + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo")}}}, } requireNotEqual(t, a, b3) b4 := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, - {L: labels.FromStrings("__name__", "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 456.456}, // Value is different. - {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo")}}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter", Unit: "metric", Help: "some help text"}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 456.456}, // Value is different. + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo")}}}, } requireNotEqual(t, a, b4) b5 := []Sample{ {}, - {L: labels.FromStrings("__name__", "test_metric_total"), M: metadata.Metadata{Type: "counter2", Unit: "metric", Help: "some help text"}}, // Different type. - {L: labels.FromStrings("__name__", "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, - {ES: []exemplar.Exemplar{{Labels: labels.FromStrings("__name__", "yolo")}}}, + {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter2", Unit: "metric", Help: "some help text"}}, // Different type. 
+ {L: labels.FromStrings(model.MetricNameLabel, "test_metric_total"), M: metadata.Metadata{Type: "counter2", Unit: "metric", Help: "some help text"}}, // Different type. 
 + {L: labels.FromStrings(model.MetricNameLabel, "test_metric2", "foo", "bar"), M: metadata.Metadata{Type: "gauge", Unit: "", Help: "other help text"}, V: 123.123}, + {ES: []exemplar.Exemplar{{Labels: labels.FromStrings(model.MetricNameLabel, "yolo")}}}, } requireNotEqual(t, a, b5) } @@ -129,3 +258,40 @@ require.Error(t, app.Commit()) require.Error(t, app.Rollback()) } + +func TestConcurrentAppenderV2_ReturnsErrAppender(t *testing.T) { + a := NewAppendable() + + // Non-concurrent multiple use is fine. + app := a.AppenderV2(t.Context()) + require.Equal(t, int32(1), a.openAppenders.Load()) + require.NoError(t, app.Commit()) + // Repeated commit fails. + require.Error(t, app.Commit()) + + app = a.AppenderV2(t.Context()) + require.NoError(t, app.Rollback()) + // Commit after rollback fails. + require.Error(t, app.Commit()) + + a.WithErrs( + nil, + nil, + errors.New("commit err"), + ) + app = a.AppenderV2(t.Context()) + require.Error(t, app.Commit()) + + a.WithErrs(nil, nil, nil) + app = a.AppenderV2(t.Context()) + require.NoError(t, app.Commit()) + require.Equal(t, int32(0), a.openAppenders.Load()) + + // Concurrent use should return appender that errors.
+ _ = a.AppenderV2(t.Context()) + app = a.AppenderV2(t.Context()) + _, err := app.Append(0, labels.EmptyLabels(), 0, 0, 0, nil, nil, storage.AOptions{}) + require.Error(t, err) + require.Error(t, app.Commit()) + require.Error(t, app.Rollback()) +} From 49c3aea56d37197e5575c336e60a3ee1c9d8a076 Mon Sep 17 00:00:00 2001 From: bwplotka Date: Mon, 12 Jan 2026 08:45:26 +0000 Subject: [PATCH 023/165] feat(storage)[PART4b]: add AppenderV2 to the rest of storage.Storage impl Signed-off-by: bwplotka --- cmd/prometheus/main.go | 17 ++++++ storage/fanout.go | 64 ++++++++++++++++++++ storage/fanout_test.go | 116 +++++++++++++++++++++++++++++++++++- storage/interface.go | 29 ++++++--- storage/interface_append.go | 1 + storage/remote/storage.go | 7 +++ storage/remote/write.go | 50 +++++++++++++--- tsdb/head_append_v2.go | 1 + 8 files changed, 267 insertions(+), 18 deletions(-) diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go index ee60e58b2e..8b82049f50 100644 --- a/cmd/prometheus/main.go +++ b/cmd/prometheus/main.go @@ -1746,6 +1746,14 @@ func (s *readyStorage) Appender(ctx context.Context) storage.Appender { return notReadyAppender{} } +// AppenderV2 implements the Storage interface. +func (s *readyStorage) AppenderV2(ctx context.Context) storage.AppenderV2 { + if x := s.get(); x != nil { + return x.AppenderV2(ctx) + } + return notReadyAppenderV2{} +} + type notReadyAppender struct{} // SetOptions does nothing in this appender implementation. 
@@ -1779,6 +1787,15 @@ func (notReadyAppender) Commit() error { return tsdb.ErrNotReady } func (notReadyAppender) Rollback() error { return tsdb.ErrNotReady } +type notReadyAppenderV2 struct{} + +func (notReadyAppenderV2) Append(storage.SeriesRef, labels.Labels, int64, int64, float64, *histogram.Histogram, *histogram.FloatHistogram, storage.AOptions) (storage.SeriesRef, error) { + return 0, tsdb.ErrNotReady +} +func (notReadyAppenderV2) Commit() error { return tsdb.ErrNotReady } + +func (notReadyAppenderV2) Rollback() error { return tsdb.ErrNotReady } + // Close implements the Storage interface. func (s *readyStorage) Close() error { if x := s.get(); x != nil { diff --git a/storage/fanout.go b/storage/fanout.go index afcf993b3f..c5102b442f 100644 --- a/storage/fanout.go +++ b/storage/fanout.go @@ -136,6 +136,19 @@ func (f *fanout) Appender(ctx context.Context) Appender { } } +func (f *fanout) AppenderV2(ctx context.Context) AppenderV2 { + primary := f.primary.AppenderV2(ctx) + secondaries := make([]AppenderV2, 0, len(f.secondaries)) + for _, storage := range f.secondaries { + secondaries = append(secondaries, storage.AppenderV2(ctx)) + } + return &fanoutAppenderV2{ + logger: f.logger, + primary: primary, + secondaries: secondaries, + } +} + // Close closes the storage and all its underlying resources. 
func (f *fanout) Close() error { errs := []error{ @@ -278,3 +291,54 @@ func (f *fanoutAppender) Rollback() (err error) { } return nil } + +type fanoutAppenderV2 struct { + logger *slog.Logger + + primary AppenderV2 + secondaries []AppenderV2 +} + +func (f *fanoutAppenderV2) Append(ref SeriesRef, l labels.Labels, st, t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, opts AOptions) (SeriesRef, error) { + ref, err := f.primary.Append(ref, l, st, t, v, h, fh, opts) + if err != nil { + return ref, err + } + + for _, appender := range f.secondaries { + if _, err := appender.Append(ref, l, st, t, v, h, fh, opts); err != nil { + return 0, err + } + } + return ref, nil +} + +func (f *fanoutAppenderV2) Commit() (err error) { + err = f.primary.Commit() + + for _, appender := range f.secondaries { + if err == nil { + err = appender.Commit() + } else { + if rollbackErr := appender.Rollback(); rollbackErr != nil { + f.logger.Error("Squashed rollback error on commit", "err", rollbackErr) + } + } + } + return err +} + +func (f *fanoutAppenderV2) Rollback() (err error) { + err = f.primary.Rollback() + + for _, appender := range f.secondaries { + rollbackErr := appender.Rollback() + switch { + case err == nil: + err = rollbackErr + case rollbackErr != nil: + f.logger.Error("Squashed rollback error on rollback", "err", rollbackErr) + } + } + return nil +} diff --git a/storage/fanout_test.go b/storage/fanout_test.go index ed4cf17696..fb2f8dd553 100644 --- a/storage/fanout_test.go +++ b/storage/fanout_test.go @@ -132,6 +132,115 @@ func TestFanout_SelectSorted(t *testing.T) { }) } +func TestFanout_SelectSorted_AppenderV2(t *testing.T) { + inputLabel := labels.FromStrings(model.MetricNameLabel, "a") + outputLabel := labels.FromStrings(model.MetricNameLabel, "a") + + inputTotalSize := 0 + + priStorage := teststorage.New(t) + defer priStorage.Close() + app1 := priStorage.AppenderV2(t.Context()) + _, err := app1.Append(0, inputLabel, 0, 0, 0, nil, nil, 
storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app1.Append(0, inputLabel, 0, 1000, 1, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app1.Append(0, inputLabel, 0, 2000, 2, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + require.NoError(t, app1.Commit()) + + remoteStorage1 := teststorage.New(t) + defer remoteStorage1.Close() + app2 := remoteStorage1.AppenderV2(t.Context()) + _, err = app2.Append(0, inputLabel, 0, 3000, 3, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app2.Append(0, inputLabel, 0, 4000, 4, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app2.Append(0, inputLabel, 0, 5000, 5, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + require.NoError(t, app2.Commit()) + + remoteStorage2 := teststorage.New(t) + defer remoteStorage2.Close() + + app3 := remoteStorage2.AppenderV2(t.Context()) + _, err = app3.Append(0, inputLabel, 0, 6000, 6, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app3.Append(0, inputLabel, 0, 7000, 7, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + _, err = app3.Append(0, inputLabel, 0, 8000, 8, nil, nil, storage.AOptions{}) + require.NoError(t, err) + inputTotalSize++ + + require.NoError(t, app3.Commit()) + + fanoutStorage := storage.NewFanout(nil, priStorage, remoteStorage1, remoteStorage2) + + t.Run("querier", func(t *testing.T) { + querier, err := fanoutStorage.Querier(0, 8000) + require.NoError(t, err) + defer querier.Close() + + matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a") + require.NoError(t, err) + + seriesSet := querier.Select(t.Context(), true, nil, matcher) + + result := make(map[int64]float64) + var labelsResult labels.Labels + var iterator chunkenc.Iterator + for seriesSet.Next() { + series := seriesSet.At() + seriesLabels := 
series.Labels() + labelsResult = seriesLabels + iterator := series.Iterator(iterator) + for iterator.Next() == chunkenc.ValFloat { + timestamp, value := iterator.At() + result[timestamp] = value + } + } + + require.Equal(t, labelsResult, outputLabel) + require.Len(t, result, inputTotalSize) + }) + t.Run("chunk querier", func(t *testing.T) { + querier, err := fanoutStorage.ChunkQuerier(0, 8000) + require.NoError(t, err) + defer querier.Close() + + matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a") + require.NoError(t, err) + + seriesSet := storage.NewSeriesSetFromChunkSeriesSet(querier.Select(t.Context(), true, nil, matcher)) + + result := make(map[int64]float64) + var labelsResult labels.Labels + var iterator chunkenc.Iterator + for seriesSet.Next() { + series := seriesSet.At() + seriesLabels := series.Labels() + labelsResult = seriesLabels + iterator := series.Iterator(iterator) + for iterator.Next() == chunkenc.ValFloat { + timestamp, value := iterator.At() + result[timestamp] = value + } + } + + require.NoError(t, seriesSet.Err()) + require.Equal(t, labelsResult, outputLabel) + require.Len(t, result, inputTotalSize) + }) +} + func TestFanoutErrors(t *testing.T) { workingStorage := teststorage.New(t) defer workingStorage.Close() @@ -224,9 +333,10 @@ type errChunkQuerier struct{ errQuerier } func (errStorage) ChunkQuerier(_, _ int64) (storage.ChunkQuerier, error) { return errChunkQuerier{}, nil } -func (errStorage) Appender(context.Context) storage.Appender { return nil } -func (errStorage) StartTime() (int64, error) { return 0, nil } -func (errStorage) Close() error { return nil } +func (errStorage) Appender(context.Context) storage.Appender { return nil } +func (errStorage) AppenderV2(context.Context) storage.AppenderV2 { return nil } +func (errStorage) StartTime() (int64, error) { return 0, nil } +func (errStorage) Close() error { return nil } func (errQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) 
storage.SeriesSet { return storage.ErrSeriesSet(errSelect) diff --git a/storage/interface.go b/storage/interface.go index 23b8b48a0c..d6ce895d58 100644 --- a/storage/interface.go +++ b/storage/interface.go @@ -61,7 +61,8 @@ type SeriesRef uint64 // Appendable allows creating Appender. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// Appendable will be removed soon (ETA: Q2 2026). type Appendable interface { // Appender returns a new appender for the storage. // @@ -77,10 +78,16 @@ type SampleAndChunkQueryable interface { } // Storage ingests and manages samples, along with various indexes. All methods -// are goroutine-safe. Storage implements storage.Appender. +// are goroutine-safe. type Storage interface { SampleAndChunkQueryable + + // Appendable allows appending to storage. + // WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). + // Appendable will be removed soon (ETA: Q2 2026). Appendable + // AppendableV2 allows appending to storage. + AppendableV2 // StartTime returns the oldest timestamp stored in the storage. StartTime() (int64, error) @@ -261,7 +268,8 @@ func (f QueryableFunc) Querier(mint, maxt int64) (Querier, error) { // AppendOptions provides options for implementations of the Appender interface. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// AppendOptions will be removed soon (ETA: Q2 2026). type AppendOptions struct { // DiscardOutOfOrder tells implementation that this append should not be out // of order. An OOO append MUST be rejected with storage.ErrOutOfOrderSample @@ -278,7 +286,8 @@ type AppendOptions struct { // I.e. 
timestamp order within batch is not validated, samples are not reordered per timestamp or by float/histogram // type. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// Appender will be removed soon (ETA: Q2 2026). type Appender interface { AppenderTransaction @@ -315,7 +324,8 @@ type GetRef interface { // ExemplarAppender provides an interface for adding samples to exemplar storage, which // within Prometheus is in-memory only. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// ExemplarAppender will be removed soon (ETA: Q2 2026). type ExemplarAppender interface { // AppendExemplar adds an exemplar for the given series labels. // An optional reference number can be provided to accelerate calls. @@ -333,7 +343,8 @@ type ExemplarAppender interface { // HistogramAppender provides an interface for appending histograms to the storage. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// HistogramAppender will be removed soon (ETA: Q2 2026). type HistogramAppender interface { // AppendHistogram adds a histogram for the given series labels. An // optional reference number can be provided to accelerate calls. A @@ -365,7 +376,8 @@ type HistogramAppender interface { // MetadataUpdater provides an interface for associating metadata to stored series. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). 
+// MetadataUpdater will be removed soon (ETA: Q2 2026). type MetadataUpdater interface { // UpdateMetadata updates a metadata entry for the given series and labels. // A series reference number is returned which can be used to modify the @@ -379,7 +391,8 @@ type MetadataUpdater interface { // StartTimestampAppender provides an interface for appending ST to storage. // -// WARNING: Work AppendableV2 is in progress. Appendable will be removed soon (ETA: Q2 2026). +// WARNING(bwplotka): Switch to AppendableV2 is in progress (https://github.com/prometheus/prometheus/issues/17632). +// StartTimestampAppender will be removed soon (ETA: Q2 2026). type StartTimestampAppender interface { // AppendSTZeroSample adds synthetic zero sample for the given st timestamp, // which will be associated with given series, labels and the incoming diff --git a/storage/interface_append.go b/storage/interface_append.go index cc7045dbd5..f2dce8e52e 100644 --- a/storage/interface_append.go +++ b/storage/interface_append.go @@ -69,6 +69,7 @@ type AppendV2Options struct { // Exemplars (optional) attached to the appended sample. // Exemplar slice MUST be sorted by Exemplar.TS. // Exemplar slice is unsafe for reuse. + // Duplicate exemplars errors MUST be ignored by implementations. Exemplars []exemplar.Exemplar // RejectOutOfOrder tells implementation that this append should not be out diff --git a/storage/remote/storage.go b/storage/remote/storage.go index f482597249..be75d23383 100644 --- a/storage/remote/storage.go +++ b/storage/remote/storage.go @@ -63,6 +63,8 @@ type Storage struct { localStartTimeCallback startTimeCallback } +var _ storage.Storage = &Storage{} + // NewStorage returns a remote.Storage. 
func NewStorage(l *slog.Logger, reg prometheus.Registerer, stCallback startTimeCallback, walDir string, flushDeadline time.Duration, sm ReadyScrapeManager, enableTypeAndUnitLabels bool) *Storage { if l == nil { @@ -193,6 +195,11 @@ func (s *Storage) Appender(ctx context.Context) storage.Appender { return s.rws.Appender(ctx) } +// AppenderV2 implements storage.Storage. +func (s *Storage) AppenderV2(ctx context.Context) storage.AppenderV2 { + return s.rws.AppenderV2(ctx) +} + // LowestSentTimestamp returns the lowest sent timestamp across all queues. func (s *Storage) LowestSentTimestamp() int64 { return s.rws.LowestSentTimestamp() diff --git a/storage/remote/write.go b/storage/remote/write.go index 92f447d624..91000a1d25 100644 --- a/storage/remote/write.go +++ b/storage/remote/write.go @@ -238,8 +238,20 @@ func (rws *WriteStorage) ApplyConfig(conf *config.Config) error { // Appender implements storage.Storage. func (rws *WriteStorage) Appender(context.Context) storage.Appender { return ×tampTracker{ - writeStorage: rws, - highestRecvTimestamp: rws.highestTimestamp, + baseTimestampTracker: baseTimestampTracker{ + writeStorage: rws, + highestRecvTimestamp: rws.highestTimestamp, + }, + } +} + +// AppenderV2 implements storage.Storage. 
+func (rws *WriteStorage) AppenderV2(context.Context) storage.AppenderV2 { + return ×tampTrackerV2{ + baseTimestampTracker: baseTimestampTracker{ + writeStorage: rws, + highestRecvTimestamp: rws.highestTimestamp, + }, } } @@ -282,9 +294,9 @@ func (rws *WriteStorage) Close() error { return nil } -type timestampTracker struct { - writeStorage *WriteStorage - appendOptions *storage.AppendOptions +type baseTimestampTracker struct { + writeStorage *WriteStorage + samples int64 exemplars int64 histograms int64 @@ -292,6 +304,12 @@ type timestampTracker struct { highestRecvTimestamp *maxTimestamp } +type timestampTracker struct { + baseTimestampTracker + + appendOptions *storage.AppendOptions +} + func (t *timestampTracker) SetOptions(opts *storage.AppendOptions) { t.appendOptions = opts } @@ -345,7 +363,7 @@ func (*timestampTracker) UpdateMetadata(storage.SeriesRef, labels.Labels, metada } // Commit implements storage.Appender. -func (t *timestampTracker) Commit() error { +func (t *baseTimestampTracker) Commit() error { t.writeStorage.samplesIn.incr(t.samples + t.exemplars + t.histograms) samplesIn.Add(float64(t.samples)) @@ -356,6 +374,24 @@ func (t *timestampTracker) Commit() error { } // Rollback implements storage.Appender. 
-func (*timestampTracker) Rollback() error { +func (*baseTimestampTracker) Rollback() error { return nil } + +type timestampTrackerV2 struct { + baseTimestampTracker +} + +func (t *timestampTrackerV2) Append(ref storage.SeriesRef, _ labels.Labels, _, ts int64, _ float64, h *histogram.Histogram, fh *histogram.FloatHistogram, opts storage.AOptions) (storage.SeriesRef, error) { + switch { + case fh != nil, h != nil: + t.histograms++ + default: + t.samples++ + } + if ts > t.highestTimestamp { + t.highestTimestamp = ts + } + t.exemplars += int64(len(opts.Exemplars)) + return ref, nil +} diff --git a/tsdb/head_append_v2.go b/tsdb/head_append_v2.go index 241fb42e97..4a62d56741 100644 --- a/tsdb/head_append_v2.go +++ b/tsdb/head_append_v2.go @@ -323,6 +323,7 @@ func (a *headAppenderV2) appendExemplars(s *memSeries, exemplar []exemplar.Exemp if err := a.head.exemplars.ValidateExemplar(s.labels(), e); err != nil { if !errors.Is(err, storage.ErrDuplicateExemplar) && !errors.Is(err, storage.ErrExemplarsDisabled) { // Except duplicates, return partial errors. + // TODO(bwplotka): Add exemplar info into error. 
errs = append(errs, err) continue } From 8a2921e3851d886367308aaedc7545ee4cd8138a Mon Sep 17 00:00:00 2001 From: bwplotka Date: Wed, 14 Jan 2026 13:57:48 +0000 Subject: [PATCH 024/165] addressed feedback Signed-off-by: bwplotka --- storage/fanout.go | 4 ++-- storage/remote/write.go | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/storage/fanout.go b/storage/fanout.go index c5102b442f..95c6499952 100644 --- a/storage/fanout.go +++ b/storage/fanout.go @@ -289,7 +289,7 @@ func (f *fanoutAppender) Rollback() (err error) { f.logger.Error("Squashed rollback error on rollback", "err", rollbackErr) } } - return nil + return err } type fanoutAppenderV2 struct { @@ -340,5 +340,5 @@ func (f *fanoutAppenderV2) Rollback() (err error) { f.logger.Error("Squashed rollback error on rollback", "err", rollbackErr) } } - return nil + return err } diff --git a/storage/remote/write.go b/storage/remote/write.go index 91000a1d25..6a336dc06b 100644 --- a/storage/remote/write.go +++ b/storage/remote/write.go @@ -382,6 +382,7 @@ type timestampTrackerV2 struct { baseTimestampTracker } +// Append implements storage.AppenderV2. func (t *timestampTrackerV2) Append(ref storage.SeriesRef, _ labels.Labels, _, ts int64, _ float64, h *histogram.Histogram, fh *histogram.FloatHistogram, opts storage.AOptions) (storage.SeriesRef, error) { switch { case fh != nil, h != nil: From 06a59346fe3236cddbd5d739b38c2ac52fa686e4 Mon Sep 17 00:00:00 2001 From: George Krajcsovits Date: Wed, 14 Jan 2026 16:41:03 +0100 Subject: [PATCH 025/165] Update tsdb/chunkenc/chunk.go Co-authored-by: Arve Knudsen Signed-off-by: George Krajcsovits --- tsdb/chunkenc/chunk.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tsdb/chunkenc/chunk.go b/tsdb/chunkenc/chunk.go index d5e028e681..711966ec39 100644 --- a/tsdb/chunkenc/chunk.go +++ b/tsdb/chunkenc/chunk.go @@ -152,7 +152,7 @@ type Iterator interface { // Before the iterator has advanced, the behaviour is unspecified. 
AtT() int64 // AtST returns the current start timestamp. - // Return 0 if the start timestamp is not implemented or not set. + // Returns 0 if the start timestamp is not implemented or not set. // Before the iterator has advanced, the behaviour is unspecified. AtST() int64 // Err returns the current error. It should be used only after the From ccb7468b0917abbce8d932a83185366b5b7fc440 Mon Sep 17 00:00:00 2001 From: Julius Hinze Date: Wed, 14 Jan 2026 16:44:50 +0100 Subject: [PATCH 026/165] tsdb: fix grow/shrink nextIndex calculation (#17863) Signed-off-by: Julius Hinze --- tsdb/exemplar.go | 21 +++++++++++-------- tsdb/exemplar_test.go | 49 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 57 insertions(+), 13 deletions(-) diff --git a/tsdb/exemplar.go b/tsdb/exemplar.go index b58976c911..71589cf21e 100644 --- a/tsdb/exemplar.go +++ b/tsdb/exemplar.go @@ -327,7 +327,8 @@ func (ce *CircularExemplarStorage) grow(l int64) int { {from: ce.nextIndex, to: oldSize}, {from: 0, to: ce.nextIndex}, } - ce.nextIndex = copyExemplarRanges(ce.index, newSlice, ce.exemplars, ranges) + totalCopied, _ := copyExemplarRanges(ce.index, newSlice, ce.exemplars, ranges) + ce.nextIndex = totalCopied ce.exemplars = newSlice return oldSize } @@ -353,6 +354,7 @@ func (ce *CircularExemplarStorage) shrink(l int64) (migrated int) { newSlice := make([]circularBufferEntry, int(l)) + var totalCopied int switch { case deleteStart == deleteEnd: // The entire buffer was cleared (shrink to zero). Note that we don't have to @@ -363,18 +365,18 @@ func (ce *CircularExemplarStorage) shrink(l int64) (migrated int) { return 0 case deleteStart < deleteEnd: // We delete an "inner" section of the circular buffer. 
- migrated = copyExemplarRanges(ce.index, newSlice, ce.exemplars, []intRange{ + totalCopied, migrated = copyExemplarRanges(ce.index, newSlice, ce.exemplars, []intRange{ {from: deleteEnd, to: oldSize}, {from: 0, to: deleteStart}, }) case deleteStart > deleteEnd: // We keep an "inner" section of the circular buffer. - migrated = copyExemplarRanges(ce.index, newSlice, ce.exemplars, []intRange{ + totalCopied, migrated = copyExemplarRanges(ce.index, newSlice, ce.exemplars, []intRange{ {from: deleteEnd, to: deleteStart}, }) } - ce.nextIndex = migrated % int(l) + ce.nextIndex = totalCopied % int(l) ce.exemplars = newSlice return migrated } @@ -582,20 +584,21 @@ func (e intRange) contains(i int) bool { } // copyExemplarRanges copies non-overlapping ranges from src into dest and -// adjusts list pointers in dest and index accordingly. Returns the number of -// copied items. +// adjusts list pointers in dest and index accordingly. Returns the total +// number of slots copied (for nextIndex) and the number of non-empty entries +// migrated. 
func copyExemplarRanges( index map[string]*indexEntry, dest, src []circularBufferEntry, ranges []intRange, -) int { +) (totalCopied, migratedEntries int) { offsets := make([]int, len(ranges)) n := 0 for i, rng := range ranges { offsets[i] = n - rng.from n += copy(dest[n:], src[rng.from:rng.to]) } - migratedEntries := n + migratedEntries = n for di := range n { e := &dest[di] if e.ref == nil { @@ -631,5 +634,5 @@ func copyExemplarRanges( } } } - return migratedEntries + return n, migratedEntries } diff --git a/tsdb/exemplar_test.go b/tsdb/exemplar_test.go index 01ffeb9541..6ecba25489 100644 --- a/tsdb/exemplar_test.go +++ b/tsdb/exemplar_test.go @@ -390,7 +390,7 @@ func TestCircularExemplarStorage_Resize(t *testing.T) { {Labels: series1, Value: 0.1, Ts: 1}, {Labels: series1, Value: 0.2, Ts: 2}, }, - wantNextIndex: 2, + wantNextIndex: 3, }, { name: "in-order, shrink", @@ -431,7 +431,7 @@ func TestCircularExemplarStorage_Resize(t *testing.T) { {Labels: series1, Value: 0.2, Ts: 2}, {Labels: series1, Value: 0.3, Ts: 3}, }, - wantNextIndex: 2, + wantNextIndex: 3, }, { name: "duplicate timestamps", @@ -452,7 +452,7 @@ func TestCircularExemplarStorage_Resize(t *testing.T) { exemplars: []exemplar.Exemplar{}, resize: 10, wantExemplars: []exemplar.Exemplar{}, - wantNextIndex: 0, + wantNextIndex: 3, }, { name: "empty input, shrink", @@ -507,7 +507,7 @@ func TestCircularExemplarStorage_Resize(t *testing.T) { wantExemplars: []exemplar.Exemplar{ {Labels: series1, Value: 0.1, Ts: 1}, }, - wantNextIndex: 1, + wantNextIndex: 0, }, } @@ -660,6 +660,47 @@ func TestCircularExemplarStorage_Resize(t *testing.T) { {Labels: series1, Value: 0.6, Ts: 6}, }, }, + { + name: "grow non-full buffer then add entries", + addExemplars1: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + {Labels: series1, Value: 0.2, Ts: 2}, + }, + resize1: 10, + wantExemplars1: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + {Labels: series1, Value: 0.2, Ts: 2}, + }, + resize2: 10, + 
addExemplars2: []exemplar.Exemplar{ + {Labels: series1, Value: 0.3, Ts: 3}, + {Labels: series1, Value: 0.4, Ts: 4}, + }, + wantExemplars2: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + {Labels: series1, Value: 0.2, Ts: 2}, + {Labels: series1, Value: 0.3, Ts: 3}, + {Labels: series1, Value: 0.4, Ts: 4}, + }, + }, + { + name: "shrink non-full buffer then add entries", + addExemplars1: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + }, + resize1: 2, + wantExemplars1: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + }, + resize2: 2, + addExemplars2: []exemplar.Exemplar{ + {Labels: series1, Value: 0.2, Ts: 2}, + }, + wantExemplars2: []exemplar.Exemplar{ + {Labels: series1, Value: 0.1, Ts: 1}, + {Labels: series1, Value: 0.2, Ts: 2}, + }, + }, } for _, tc := range resizeTwiceCases { From af3277f8326431808ecec2a2095404ad3422a929 Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Wed, 3 Dec 2025 18:46:35 +0100 Subject: [PATCH 027/165] PromQL: Add `fill*()` binop modifiers to provide default values for missing series Signed-off-by: Julius Volz --- promql/engine.go | 54 +- promql/parser/ast.go | 13 + promql/parser/generated_parser.y | 73 +- promql/parser/generated_parser.y.go | 1284 +++++++++-------- promql/parser/lex.go | 31 + promql/parser/parse.go | 6 + promql/parser/printer.go | 13 + promql/parser/printer_test.go | 20 + web/api/v1/translate_ast.go | 4 + .../ExplainViews/BinaryExpr/VectorVector.tsx | 73 +- web/ui/mantine-ui/src/promql/ast.ts | 5 + web/ui/mantine-ui/src/promql/binOp.test.ts | 24 + web/ui/mantine-ui/src/promql/binOp.ts | 29 +- web/ui/mantine-ui/src/promql/format.tsx | 43 +- web/ui/mantine-ui/src/promql/serialize.ts | 18 +- .../src/promql/serializeAndFormat.test.ts | 13 + .../src/complete/promql.terms.ts | 4 + .../src/parser/vector.test.ts | 85 +- .../codemirror-promql/src/parser/vector.ts | 35 + .../codemirror-promql/src/types/vector.ts | 7 + web/ui/module/lezer-promql/src/promql.grammar | 24 +- 
web/ui/module/lezer-promql/src/tokens.js | 140 +- 22 files changed, 1296 insertions(+), 702 deletions(-) diff --git a/promql/engine.go b/promql/engine.go index 57a1f41bb8..b609dc4f0a 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -2862,7 +2862,8 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * if matching.Card == parser.CardManyToMany { panic("many-to-many only allowed for set operators") } - if len(lhs) == 0 || len(rhs) == 0 { + if (len(lhs) == 0 && len(rhs) == 0) || + ((len(lhs) == 0 || len(rhs) == 0) && matching.FillValues.RHS == nil && matching.FillValues.LHS == nil) { return nil, nil // Short-circuit: nothing is going to match. } @@ -2910,17 +2911,9 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * } matchedSigs := enh.matchedSigs - // For all lhs samples find a respective rhs sample and perform - // the binary operation. var lastErr error - for i, ls := range lhs { - sigOrd := lhsh[i].sigOrdinal - - rs, found := rightSigs[sigOrd] // Look for a match in the rhs Vector. - if !found { - continue - } + doBinOp := func(ls, rs Sample, sigOrd int) { // Account for potentially swapped sidedness. fl, fr := ls.F, rs.F hl, hr := ls.H, rs.H @@ -2931,7 +2924,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * floatValue, histogramValue, keep, info, err := vectorElemBinop(op, fl, fr, hl, hr, pos) if err != nil { lastErr = err - continue + return } if info != nil { lastErr = info @@ -2971,7 +2964,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * } if !keep && !returnBool { - continue + return } enh.Out = append(enh.Out, Sample{ @@ -2981,6 +2974,43 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * DropName: returnBool, }) } + + // For all lhs samples, find a respective rhs sample and perform + // the binary operation. 
+ for i, ls := range lhs { + sigOrd := lhsh[i].sigOrdinal + + rs, found := rightSigs[sigOrd] // Look for a match in the rhs Vector. + if !found { + fill := matching.FillValues.RHS + if fill == nil { + continue + } + rs = Sample{ + Metric: ls.Metric.MatchLabels(matching.On, matching.MatchingLabels...), + F: *fill, + } + } + + doBinOp(ls, rs, sigOrd) + } + + // For any rhs samples which have not been matched, check if we need to + // perform the operation with a fill value from the lhs. + if fill := matching.FillValues.LHS; fill != nil { + for sigOrd, rs := range rightSigs { + if _, matched := matchedSigs[sigOrd]; matched { + continue // Already matched. + } + ls := Sample{ + Metric: rs.Metric.MatchLabels(matching.On, matching.MatchingLabels...), + F: *fill, + } + + doBinOp(ls, rs, sigOrd) + } + } + return enh.Out, lastErr } diff --git a/promql/parser/ast.go b/promql/parser/ast.go index 130f9aefb7..6496095287 100644 --- a/promql/parser/ast.go +++ b/promql/parser/ast.go @@ -318,6 +318,19 @@ type VectorMatching struct { // Include contains additional labels that should be included in // the result from the side with the lower cardinality. Include []string + // Fill-in values to use when a series from one side does not find a match on the other side. + FillValues VectorMatchFillValues +} + +// VectorMatchFillValues contains the fill values to use for Vector matching +// when one side does not find a match on the other side. +// When a fill value is nil, no fill is applied for that side, and there +// is no output for the match group if there is no match. +type VectorMatchFillValues struct { + // RHS is the fill value to use for the right-hand side. + RHS *float64 + // LHS is the fill value to use for the left-hand side. + LHS *float64 } // Visitor allows visiting a Node and its child nodes. 
The Visit method is diff --git a/promql/parser/generated_parser.y b/promql/parser/generated_parser.y index 47776f53d0..71ab6ed4b3 100644 --- a/promql/parser/generated_parser.y +++ b/promql/parser/generated_parser.y @@ -139,6 +139,9 @@ BOOL BY GROUP_LEFT GROUP_RIGHT +FILL +FILL_LEFT +FILL_RIGHT IGNORING OFFSET SMOOTHED @@ -190,7 +193,7 @@ START_METRIC_SELECTOR %type int %type uint %type number series_value signed_number signed_or_unsigned_number -%type step_invariant_expr aggregate_expr aggregate_modifier bin_modifier binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers label_matchers matrix_selector number_duration_literal offset_expr anchored_expr smoothed_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector duration_expr paren_duration_expr positive_duration_expr offset_duration_expr +%type step_invariant_expr aggregate_expr aggregate_modifier bin_modifier fill_modifiers binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers fill_value label_matchers matrix_selector number_duration_literal offset_expr anchored_expr smoothed_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector duration_expr paren_duration_expr positive_duration_expr offset_duration_expr %start start @@ -302,7 +305,7 @@ binary_expr : expr ADD bin_modifier expr { $$ = yylex.(*parser).newBinar // Using left recursion for the modifier rules, helps to keep the parser stack small and // reduces allocations. 
-bin_modifier : group_modifiers; +bin_modifier : fill_modifiers; bool_modifier : /* empty */ { $$ = &BinaryExpr{ @@ -346,6 +349,47 @@ group_modifiers: bool_modifier /* empty */ } ; +fill_modifiers: group_modifiers /* empty */ + /* Only fill() */ + | group_modifiers FILL fill_value + { + $$ = $1 + fill := $3.(*NumberLiteral).Val + $$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill + $$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill + } + /* Only fill_left() */ + | group_modifiers FILL_LEFT fill_value + { + $$ = $1 + fill := $3.(*NumberLiteral).Val + $$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill + } + /* Only fill_right() */ + | group_modifiers FILL_RIGHT fill_value + { + $$ = $1 + fill := $3.(*NumberLiteral).Val + $$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill + } + /* fill_left() fill_right() */ + | group_modifiers FILL_LEFT fill_value FILL_RIGHT fill_value + { + $$ = $1 + fill_left := $3.(*NumberLiteral).Val + fill_right := $5.(*NumberLiteral).Val + $$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left + $$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right + } + /* fill_right() fill_left() */ + | group_modifiers FILL_RIGHT fill_value FILL_LEFT fill_value + { + fill_right := $3.(*NumberLiteral).Val + fill_left := $5.(*NumberLiteral).Val + $$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left + $$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right + } + ; grouping_labels : LEFT_PAREN grouping_label_list RIGHT_PAREN { $$ = $2 } @@ -387,6 +431,21 @@ grouping_label : maybe_label { yylex.(*parser).unexpected("grouping opts", "label"); $$ = Item{} } ; +fill_value : LEFT_PAREN number_duration_literal RIGHT_PAREN + { + $$ = $2.(*NumberLiteral) + } + | LEFT_PAREN unary_op number_duration_literal RIGHT_PAREN + { + nl := $3.(*NumberLiteral) + if $2.Typ == SUB { + nl.Val *= -1 + } + nl.PosRange.Start = $2.Pos + $$ = nl + } + ; + /* * Function calls. 
*/ @@ -697,7 +756,7 @@ metric : metric_identifier label_set ; -metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED; +metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | FILL | FILL_LEFT | FILL_RIGHT | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED; label_set : LEFT_BRACE label_set_list RIGHT_BRACE { $$ = labels.New($2...) } @@ -954,7 +1013,7 @@ counter_reset_hint : UNKNOWN_COUNTER_RESET | COUNTER_RESET | NOT_COUNTER_RESET | aggregate_op : AVG | BOTTOMK | COUNT | COUNT_VALUES | GROUP | MAX | MIN | QUANTILE | STDDEV | STDVAR | SUM | TOPK | LIMITK | LIMIT_RATIO; // Inside of grouping options label names can be recognized as keywords by the lexer. This is a list of keywords that could also be a label name. 
-maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2 | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED; +maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | FILL | FILL_LEFT | FILL_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2 | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED; unary_op : ADD | SUB; @@ -1162,7 +1221,7 @@ offset_duration_expr : number_duration_literal } | duration_expr ; - + min_max: MIN | MAX ; duration_expr : number_duration_literal @@ -1277,14 +1336,14 @@ duration_expr : number_duration_literal ; paren_duration_expr : LEFT_PAREN duration_expr RIGHT_PAREN - { + { yylex.(*parser).experimentalDurationExpr($2.(Expr)) if durationExpr, ok := $2.(*DurationExpr); ok { durationExpr.Wrapped = true $$ = durationExpr break } - $$ = $2 + $$ = $2 } ; diff --git a/promql/parser/generated_parser.y.go b/promql/parser/generated_parser.y.go index f5feec0b55..d20460ed5b 100644 --- a/promql/parser/generated_parser.y.go +++ b/promql/parser/generated_parser.y.go @@ -113,31 +113,34 @@ const BOOL = 57420 const BY = 57421 const GROUP_LEFT = 57422 const GROUP_RIGHT = 57423 -const IGNORING = 57424 -const OFFSET = 57425 -const SMOOTHED = 57426 -const ANCHORED = 57427 -const ON = 57428 -const WITHOUT = 57429 -const keywordsEnd = 57430 -const preprocessorStart = 57431 -const START = 57432 -const END = 57433 -const STEP = 57434 -const RANGE = 57435 -const preprocessorEnd = 57436 -const counterResetHintsStart = 57437 -const UNKNOWN_COUNTER_RESET = 57438 -const COUNTER_RESET = 57439 -const NOT_COUNTER_RESET = 57440 -const GAUGE_TYPE = 57441 -const counterResetHintsEnd = 57442 -const startSymbolsStart 
= 57443 -const START_METRIC = 57444 -const START_SERIES_DESCRIPTION = 57445 -const START_EXPRESSION = 57446 -const START_METRIC_SELECTOR = 57447 -const startSymbolsEnd = 57448 +const FILL = 57424 +const FILL_LEFT = 57425 +const FILL_RIGHT = 57426 +const IGNORING = 57427 +const OFFSET = 57428 +const SMOOTHED = 57429 +const ANCHORED = 57430 +const ON = 57431 +const WITHOUT = 57432 +const keywordsEnd = 57433 +const preprocessorStart = 57434 +const START = 57435 +const END = 57436 +const STEP = 57437 +const RANGE = 57438 +const preprocessorEnd = 57439 +const counterResetHintsStart = 57440 +const UNKNOWN_COUNTER_RESET = 57441 +const COUNTER_RESET = 57442 +const NOT_COUNTER_RESET = 57443 +const GAUGE_TYPE = 57444 +const counterResetHintsEnd = 57445 +const startSymbolsStart = 57446 +const START_METRIC = 57447 +const START_SERIES_DESCRIPTION = 57448 +const START_EXPRESSION = 57449 +const START_METRIC_SELECTOR = 57450 +const startSymbolsEnd = 57451 var yyToknames = [...]string{ "$end", @@ -221,6 +224,9 @@ var yyToknames = [...]string{ "BY", "GROUP_LEFT", "GROUP_RIGHT", + "FILL", + "FILL_LEFT", + "FILL_RIGHT", "IGNORING", "OFFSET", "SMOOTHED", @@ -258,376 +264,403 @@ var yyExca = [...]int16{ -1, 1, 1, -1, -2, 0, - -1, 41, - 1, 150, - 10, 150, - 24, 150, + -1, 44, + 1, 161, + 10, 161, + 24, 161, -2, 0, - -1, 72, - 2, 193, - 15, 193, - 79, 193, - 87, 193, - -2, 107, - -1, 73, - 2, 194, - 15, 194, - 79, 194, - 87, 194, - -2, 108, - -1, 74, - 2, 195, - 15, 195, - 79, 195, - 87, 195, - -2, 110, -1, 75, - 2, 196, - 15, 196, - 79, 196, - 87, 196, - -2, 111, - -1, 76, - 2, 197, - 15, 197, - 79, 197, - 87, 197, - -2, 112, - -1, 77, - 2, 198, - 15, 198, - 79, 198, - 87, 198, - -2, 117, - -1, 78, - 2, 199, - 15, 199, - 79, 199, - 87, 199, - -2, 119, - -1, 79, - 2, 200, - 15, 200, - 79, 200, - 87, 200, - -2, 121, - -1, 80, - 2, 201, - 15, 201, - 79, 201, - 87, 201, - -2, 122, - -1, 81, - 2, 202, - 15, 202, - 79, 202, - 87, 202, - -2, 123, - -1, 82, - 2, 203, - 15, 203, - 79, 203, - 87, 
203, - -2, 124, - -1, 83, 2, 204, 15, 204, 79, 204, - 87, 204, - -2, 125, - -1, 84, + 90, 204, + -2, 115, + -1, 76, 2, 205, 15, 205, 79, 205, - 87, 205, - -2, 129, - -1, 85, + 90, 205, + -2, 116, + -1, 77, 2, 206, 15, 206, 79, 206, - 87, 206, + 90, 206, + -2, 118, + -1, 78, + 2, 207, + 15, 207, + 79, 207, + 90, 207, + -2, 119, + -1, 79, + 2, 208, + 15, 208, + 79, 208, + 90, 208, + -2, 123, + -1, 80, + 2, 209, + 15, 209, + 79, 209, + 90, 209, + -2, 128, + -1, 81, + 2, 210, + 15, 210, + 79, 210, + 90, 210, -2, 130, - -1, 137, - 41, 274, - 42, 274, - 52, 274, - 53, 274, - 57, 274, + -1, 82, + 2, 211, + 15, 211, + 79, 211, + 90, 211, + -2, 132, + -1, 83, + 2, 212, + 15, 212, + 79, 212, + 90, 212, + -2, 133, + -1, 84, + 2, 213, + 15, 213, + 79, 213, + 90, 213, + -2, 134, + -1, 85, + 2, 214, + 15, 214, + 79, 214, + 90, 214, + -2, 135, + -1, 86, + 2, 215, + 15, 215, + 79, 215, + 90, 215, + -2, 136, + -1, 87, + 2, 216, + 15, 216, + 79, 216, + 90, 216, + -2, 140, + -1, 88, + 2, 217, + 15, 217, + 79, 217, + 90, 217, + -2, 141, + -1, 140, + 41, 288, + 42, 288, + 52, 288, + 53, 288, + 57, 288, -2, 22, - -1, 251, - 9, 259, - 12, 259, - 13, 259, - 18, 259, - 19, 259, - 25, 259, - 41, 259, - 47, 259, - 48, 259, - 51, 259, - 57, 259, - 62, 259, - 63, 259, - 64, 259, - 65, 259, - 66, 259, - 67, 259, - 68, 259, - 69, 259, - 70, 259, - 71, 259, - 72, 259, - 73, 259, - 74, 259, - 75, 259, - 79, 259, - 83, 259, - 84, 259, - 85, 259, - 87, 259, - 90, 259, - 91, 259, - 92, 259, - 93, 259, + -1, 258, + 9, 273, + 12, 273, + 13, 273, + 18, 273, + 19, 273, + 25, 273, + 41, 273, + 47, 273, + 48, 273, + 51, 273, + 57, 273, + 62, 273, + 63, 273, + 64, 273, + 65, 273, + 66, 273, + 67, 273, + 68, 273, + 69, 273, + 70, 273, + 71, 273, + 72, 273, + 73, 273, + 74, 273, + 75, 273, + 79, 273, + 82, 273, + 83, 273, + 84, 273, + 86, 273, + 87, 273, + 88, 273, + 90, 273, + 93, 273, + 94, 273, + 95, 273, + 96, 273, -2, 0, - -1, 252, - 9, 259, - 12, 259, - 13, 259, - 18, 259, - 19, 259, - 25, 259, - 41, 
259, - 47, 259, - 48, 259, - 51, 259, - 57, 259, - 62, 259, - 63, 259, - 64, 259, - 65, 259, - 66, 259, - 67, 259, - 68, 259, - 69, 259, - 70, 259, - 71, 259, - 72, 259, - 73, 259, - 74, 259, - 75, 259, - 79, 259, - 83, 259, - 84, 259, - 85, 259, - 87, 259, - 90, 259, - 91, 259, - 92, 259, - 93, 259, + -1, 259, + 9, 273, + 12, 273, + 13, 273, + 18, 273, + 19, 273, + 25, 273, + 41, 273, + 47, 273, + 48, 273, + 51, 273, + 57, 273, + 62, 273, + 63, 273, + 64, 273, + 65, 273, + 66, 273, + 67, 273, + 68, 273, + 69, 273, + 70, 273, + 71, 273, + 72, 273, + 73, 273, + 74, 273, + 75, 273, + 79, 273, + 82, 273, + 83, 273, + 84, 273, + 86, 273, + 87, 273, + 88, 273, + 90, 273, + 93, 273, + 94, 273, + 95, 273, + 96, 273, -2, 0, } const yyPrivate = 57344 -const yyLast = 1050 +const yyLast = 1224 var yyAct = [...]int16{ - 58, 186, 413, 411, 341, 418, 286, 243, 197, 95, - 189, 48, 355, 144, 70, 227, 93, 251, 252, 356, - 159, 190, 65, 120, 17, 88, 127, 130, 128, 129, - 22, 425, 426, 427, 428, 131, 249, 121, 124, 335, - 250, 67, 132, 126, 408, 407, 377, 332, 125, 123, - 331, 102, 126, 122, 336, 154, 324, 6, 397, 18, - 19, 111, 112, 20, 135, 114, 137, 119, 101, 375, - 337, 323, 375, 330, 11, 12, 14, 15, 16, 21, - 23, 25, 26, 27, 28, 29, 33, 34, 43, 133, - 329, 13, 116, 118, 117, 24, 38, 37, 146, 30, - 402, 124, 31, 32, 35, 36, 130, 412, 138, 396, - 194, 125, 123, 328, 131, 126, 365, 182, 239, 401, - 193, 199, 204, 205, 206, 207, 208, 209, 177, 363, - 362, 181, 200, 200, 200, 200, 200, 200, 200, 178, - 120, 238, 223, 201, 201, 201, 201, 201, 201, 201, - 212, 215, 134, 200, 136, 211, 210, 2, 3, 4, - 5, 222, 233, 221, 201, 245, 235, 384, 333, 371, - 228, 247, 229, 360, 370, 359, 246, 358, 188, 273, - 140, 368, 114, 195, 119, 194, 277, 139, 62, 369, - 268, 237, 229, 271, 185, 193, 441, 200, 61, 196, - 367, 201, 273, 383, 155, 278, 279, 280, 201, 116, - 118, 117, 231, 200, 236, 121, 124, 195, 382, 440, - 86, 218, 230, 232, 201, 381, 125, 123, 276, 275, - 126, 122, 231, 196, 274, 146, 87, 
132, 439, 327, - 429, 438, 230, 232, 248, 141, 184, 183, 419, 253, - 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, - 264, 265, 266, 267, 334, 357, 191, 192, 214, 353, - 354, 202, 203, 361, 121, 124, 88, 364, 283, 7, - 39, 213, 282, 199, 200, 125, 123, 395, 200, 126, - 122, 366, 10, 194, 200, 201, 394, 281, 393, 201, - 392, 391, 90, 193, 390, 201, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 389, 194, 388, 120, 195, 373, 387, 386, 385, - 153, 99, 193, 62, 442, 374, 376, 200, 378, 185, - 56, 196, 40, 61, 379, 380, 89, 152, 201, 151, - 1, 100, 102, 103, 195, 104, 105, 175, 71, 108, - 109, 398, 111, 112, 113, 86, 114, 115, 119, 101, - 196, 66, 200, 55, 9, 9, 54, 404, 8, 53, - 406, 87, 41, 201, 52, 158, 410, 51, 414, 415, - 416, 184, 183, 116, 118, 117, 421, 420, 423, 422, - 417, 430, 50, 49, 289, 47, 156, 216, 147, 46, - 431, 432, 200, 372, 299, 433, 202, 203, 145, 96, - 305, 435, 157, 201, 403, 437, 326, 288, 147, 94, - 436, 97, 45, 44, 57, 242, 434, 234, 145, 338, - 443, 200, 97, 98, 121, 124, 143, 240, 284, 301, - 302, 97, 201, 303, 91, 125, 123, 424, 187, 126, - 122, 316, 287, 59, 290, 292, 294, 295, 296, 304, - 306, 309, 310, 311, 312, 313, 317, 318, 142, 0, - 291, 293, 297, 298, 300, 307, 322, 321, 308, 289, - 96, 0, 314, 315, 319, 320, 226, 150, 405, 299, - 94, 225, 149, 0, 0, 305, 0, 0, 92, 285, - 0, 0, 288, 97, 224, 148, 62, 121, 124, 0, - 0, 0, 272, 0, 0, 0, 61, 0, 125, 123, - 0, 0, 126, 122, 301, 302, 0, 0, 303, 0, - 0, 0, 0, 0, 0, 0, 316, 0, 86, 290, - 292, 294, 295, 296, 304, 306, 309, 310, 311, 312, - 313, 317, 318, 0, 87, 291, 293, 297, 298, 300, - 307, 322, 321, 308, 184, 183, 0, 314, 315, 319, - 320, 62, 0, 120, 60, 88, 0, 63, 0, 0, - 22, 61, 0, 0, 217, 0, 0, 64, 0, 269, - 270, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 100, 102, 0, 86, 0, 0, 0, 0, 0, 18, - 19, 111, 112, 20, 0, 114, 115, 119, 101, 87, - 0, 0, 0, 0, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 0, 0, - 400, 13, 116, 118, 117, 24, 38, 37, 399, 30, - 0, 
0, 31, 32, 68, 69, 62, 42, 0, 60, - 88, 0, 63, 0, 0, 22, 61, 121, 124, 0, - 0, 0, 64, 0, 121, 124, 0, 0, 125, 123, - 0, 0, 126, 122, 0, 125, 123, 0, 86, 126, - 122, 0, 0, 0, 18, 19, 0, 0, 20, 0, - 0, 0, 0, 0, 87, 0, 0, 0, 0, 72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 85, 0, 0, 0, 13, 0, 0, 220, - 24, 38, 37, 0, 30, 0, 325, 31, 32, 68, - 69, 62, 0, 0, 60, 88, 0, 63, 121, 124, - 22, 61, 0, 0, 0, 0, 0, 64, 0, 125, - 123, 0, 0, 126, 122, 0, 0, 0, 0, 0, - 121, 124, 0, 86, 0, 0, 0, 0, 0, 18, - 19, 125, 123, 20, 0, 126, 122, 0, 0, 87, - 0, 0, 0, 0, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 17, 39, - 0, 13, 0, 0, 22, 24, 38, 37, 0, 30, - 340, 0, 31, 32, 68, 69, 0, 339, 0, 0, - 0, 343, 344, 342, 349, 351, 348, 350, 345, 346, - 347, 352, 241, 18, 19, 0, 194, 20, 0, 244, - 0, 0, 0, 247, 0, 0, 193, 0, 11, 12, - 14, 15, 16, 21, 23, 25, 26, 27, 28, 29, - 33, 34, 0, 0, 120, 13, 0, 0, 195, 24, - 38, 37, 219, 30, 0, 0, 31, 32, 35, 36, - 0, 0, 0, 120, 196, 0, 0, 0, 0, 0, - 0, 100, 102, 103, 0, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 0, 114, 115, 119, 101, - 100, 102, 103, 0, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 198, 114, 115, 119, 101, 120, - 0, 62, 0, 116, 118, 117, 0, 185, 176, 0, - 0, 61, 0, 0, 0, 62, 0, 0, 0, 0, - 0, 185, 116, 118, 117, 61, 100, 102, 103, 0, - 104, 105, 106, 86, 108, 109, 110, 111, 112, 113, - 0, 114, 115, 119, 101, 0, 0, 86, 0, 87, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 184, - 183, 0, 0, 87, 0, 0, 0, 0, 116, 118, - 117, 0, 0, 184, 183, 409, 0, 0, 0, 0, - 0, 0, 0, 0, 202, 203, 343, 344, 342, 349, - 351, 348, 350, 345, 346, 347, 352, 0, 179, 180, + 61, 363, 190, 429, 351, 436, 431, 293, 247, 201, + 98, 51, 147, 193, 369, 96, 231, 412, 413, 370, + 132, 133, 68, 130, 73, 163, 194, 131, 443, 444, + 445, 446, 134, 135, 256, 253, 254, 255, 257, 258, + 259, 129, 70, 426, 123, 425, 124, 127, 391, 342, + 157, 458, 223, 198, 447, 389, 415, 128, 126, 345, + 451, 129, 125, 197, 414, 465, 398, 138, 379, 140, + 6, 103, 105, 106, 
346, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 199, 117, 118, 122, 104, + 347, 136, 343, 46, 124, 127, 389, 133, 334, 251, + 397, 200, 149, 377, 192, 128, 126, 199, 134, 129, + 125, 198, 141, 333, 420, 396, 119, 121, 120, 123, + 186, 197, 395, 200, 203, 208, 209, 210, 211, 212, + 213, 181, 376, 419, 430, 204, 204, 204, 204, 204, + 204, 204, 182, 199, 185, 227, 205, 205, 205, 205, + 205, 205, 205, 216, 219, 215, 204, 341, 214, 200, + 137, 117, 139, 122, 339, 385, 237, 205, 239, 464, + 384, 249, 226, 2, 3, 4, 5, 91, 290, 225, + 340, 123, 289, 280, 250, 383, 364, 338, 124, 127, + 284, 119, 121, 120, 275, 195, 196, 288, 218, 128, + 126, 204, 460, 129, 125, 205, 280, 278, 158, 105, + 374, 217, 205, 286, 287, 423, 243, 204, 241, 114, + 115, 124, 127, 117, 373, 122, 104, 372, 205, 222, + 143, 437, 128, 126, 124, 127, 129, 125, 65, 242, + 149, 240, 337, 142, 42, 128, 126, 418, 64, 129, + 125, 285, 252, 119, 121, 120, 365, 366, 260, 261, + 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 344, 371, 127, 367, 368, 198, 283, + 375, 124, 127, 282, 378, 128, 126, 281, 197, 129, + 203, 204, 128, 126, 135, 204, 129, 125, 198, 380, + 65, 204, 205, 144, 7, 409, 205, 408, 197, 407, + 64, 406, 205, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, 202, 232, + 199, 233, 89, 156, 417, 65, 387, 405, 463, 233, + 404, 189, 102, 224, 403, 64, 200, 204, 90, 388, + 390, 10, 392, 124, 127, 393, 394, 462, 205, 402, + 461, 93, 124, 127, 128, 126, 401, 89, 129, 125, + 400, 235, 399, 128, 126, 416, 410, 129, 125, 235, + 8, 234, 236, 90, 44, 59, 204, 411, 43, 234, + 236, 92, 422, 188, 187, 1, 179, 205, 424, 155, + 428, 154, 230, 432, 433, 434, 150, 229, 74, 335, + 439, 438, 441, 440, 449, 450, 148, 435, 58, 452, + 228, 206, 207, 448, 336, 57, 296, 56, 386, 100, + 204, 69, 453, 454, 9, 9, 309, 455, 99, 55, + 457, 205, 315, 124, 127, 162, 421, 150, 97, 295, + 99, 54, 459, 53, 128, 126, 238, 148, 129, 125, + 97, 100, 153, 204, 466, 146, 52, 152, 
95, 50, + 100, 311, 312, 100, 205, 313, 160, 220, 49, 161, + 151, 48, 159, 326, 47, 60, 297, 299, 301, 302, + 303, 314, 316, 319, 320, 321, 322, 323, 327, 328, + 246, 456, 298, 300, 304, 305, 306, 307, 308, 310, + 317, 332, 331, 318, 296, 348, 101, 324, 325, 329, + 330, 245, 244, 291, 309, 198, 94, 442, 248, 191, + 315, 350, 251, 294, 292, 197, 62, 295, 349, 145, + 0, 0, 353, 354, 352, 359, 361, 358, 360, 355, + 356, 357, 362, 0, 0, 0, 0, 199, 0, 311, + 312, 0, 0, 313, 0, 0, 0, 0, 0, 0, + 0, 326, 0, 200, 297, 299, 301, 302, 303, 314, + 316, 319, 320, 321, 322, 323, 327, 328, 0, 0, + 298, 300, 304, 305, 306, 307, 308, 310, 317, 332, + 331, 318, 0, 0, 0, 324, 325, 329, 330, 65, + 0, 0, 63, 91, 0, 66, 427, 0, 25, 64, + 0, 0, 221, 0, 0, 67, 0, 353, 354, 352, + 359, 361, 358, 360, 355, 356, 357, 362, 0, 0, + 0, 89, 0, 0, 0, 0, 0, 21, 22, 0, + 0, 23, 0, 0, 0, 0, 0, 90, 0, 0, + 0, 0, 75, 76, 77, 78, 79, 80, 81, 82, + 83, 84, 85, 86, 87, 88, 0, 0, 0, 13, + 0, 0, 16, 17, 18, 0, 27, 41, 40, 0, + 33, 0, 0, 34, 35, 71, 72, 65, 45, 0, + 63, 91, 0, 66, 0, 0, 25, 64, 0, 0, + 0, 0, 0, 67, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 89, + 0, 0, 0, 0, 0, 21, 22, 0, 0, 23, + 0, 0, 0, 0, 0, 90, 0, 0, 0, 0, + 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 0, 0, 0, 13, 0, 0, + 16, 17, 18, 0, 27, 41, 40, 0, 33, 0, + 0, 34, 35, 71, 72, 65, 0, 0, 63, 91, + 0, 66, 0, 0, 25, 64, 0, 0, 0, 0, + 0, 67, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 89, 0, 0, + 0, 0, 0, 21, 22, 0, 0, 23, 0, 0, + 0, 0, 0, 90, 0, 0, 0, 0, 75, 76, + 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 0, 0, 0, 13, 0, 0, 16, 17, + 18, 0, 27, 41, 40, 0, 33, 20, 91, 34, + 35, 71, 72, 25, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 21, 22, 0, 0, 23, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 11, 12, 14, + 15, 19, 24, 26, 28, 29, 30, 31, 32, 36, + 37, 0, 0, 0, 13, 0, 0, 16, 17, 18, + 0, 27, 41, 40, 0, 33, 20, 42, 34, 35, + 38, 39, 25, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 21, 22, 0, 0, 23, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 11, 12, 14, 15, + 19, 24, 26, 28, 29, 30, 31, 32, 36, 37, + 123, 0, 0, 13, 0, 0, 16, 17, 18, 0, + 27, 41, 40, 0, 33, 0, 0, 34, 35, 38, + 39, 123, 0, 0, 0, 0, 0, 103, 105, 106, + 0, 107, 108, 109, 110, 111, 112, 113, 114, 115, + 116, 0, 117, 118, 122, 104, 0, 0, 103, 105, + 106, 0, 107, 108, 109, 0, 111, 112, 113, 114, + 115, 116, 382, 117, 118, 122, 104, 0, 0, 65, + 0, 123, 119, 121, 120, 189, 65, 0, 0, 64, + 0, 381, 189, 0, 0, 0, 64, 0, 0, 0, + 0, 0, 0, 119, 121, 120, 0, 0, 103, 105, + 106, 89, 107, 108, 0, 0, 111, 112, 89, 114, + 115, 116, 180, 117, 118, 122, 104, 90, 0, 65, + 0, 0, 0, 0, 90, 189, 65, 188, 187, 64, + 0, 0, 279, 0, 188, 187, 64, 123, 0, 0, + 0, 0, 0, 119, 121, 120, 0, 0, 0, 0, + 0, 89, 0, 0, 0, 206, 207, 0, 89, 0, + 0, 0, 206, 207, 103, 105, 0, 90, 0, 0, + 0, 0, 0, 0, 90, 114, 115, 188, 187, 117, + 118, 122, 104, 0, 188, 187, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 183, 184, 0, 0, 119, + 121, 120, 276, 277, } var yyPact = [...]int16{ - 55, 269, 806, 806, 657, 12, -1000, -1000, -1000, 267, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 488, - -1000, 329, -1000, 889, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -4, 27, - 222, -1000, -1000, 742, -1000, 742, 263, -1000, 172, 165, - 230, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 426, -1000, - -1000, 495, -1000, -1000, 345, 326, -1000, -1000, 31, -1000, - -58, -58, -58, -58, -58, -58, -58, -58, -58, -58, - -58, -58, -58, -58, -58, -58, 956, -1000, -1000, 176, - 942, 324, 324, 324, 324, 324, 324, 222, -52, -1000, - 266, 266, 572, -1000, 870, 717, 126, -13, -1000, 141, - 139, 324, 494, -1000, 
-1000, 168, 188, -1000, -1000, 417, - -1000, 189, -1000, 116, 847, 742, -1000, -46, -63, -1000, - 742, 742, 742, 742, 742, 742, 742, 742, 742, 742, - 742, 742, 742, 742, 742, -1000, -1000, -1000, 507, 219, - 214, 213, -4, -1000, -1000, 324, -1000, 190, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, 101, 101, 276, -1000, -4, - -1000, 324, 172, 165, 59, 59, -13, -13, -13, -13, - -1000, -1000, -1000, 487, -1000, -1000, 49, -1000, 889, -1000, - -1000, -1000, -1000, 739, -1000, 406, -1000, 88, -1000, -1000, - -1000, -1000, -1000, 48, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 21, 142, 13, -1000, -1000, -1000, 813, 9, 266, - 266, 266, 266, 126, 126, 569, 569, 569, 310, 935, - 569, 569, 310, 126, 126, 569, 126, 9, -1000, 162, - 160, 158, 324, -13, 108, 107, 324, 717, 94, -1000, - -1000, -1000, 179, -1000, 167, -1000, -1000, -1000, -1000, -1000, + 68, 294, 934, 934, 688, 855, -1000, -1000, -1000, 231, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 742, 324, -1000, -1000, -1000, -1000, - -1000, -1000, 53, 53, 20, 53, 155, 155, 201, 150, - -1000, -1000, 323, 322, 321, 317, 315, 298, 295, 294, - 292, 290, 281, -1000, -1000, -1000, -1000, -1000, 87, 36, - 324, 636, -1000, -1000, 643, -1000, 98, -1000, -1000, -1000, - 402, -1000, 889, 476, -1000, -1000, -1000, 53, -1000, 19, - 18, 1008, -1000, -1000, -1000, 50, 284, 284, 284, 101, - 234, 234, 50, 234, 50, -65, -1000, -1000, 233, -1000, - 324, -1000, -1000, -1000, -1000, -1000, -1000, 53, 53, -1000, - -1000, -1000, 53, -1000, -1000, -1000, -1000, -1000, -1000, 284, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 324, - 403, -1000, -1000, -1000, 217, -1000, 174, -1000, 313, -1000, - -1000, -1000, -1000, -1000, + -1000, -1000, 448, -1000, 340, -1000, 996, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 
-1000, -1000, -1000, -1000, -1000, + -1000, 5, 18, 279, -1000, -1000, 776, -1000, 776, 164, + -1000, 228, 215, 288, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, 445, -1000, -1000, 460, -1000, -1000, 397, 329, -1000, + -1000, 26, -1000, -53, -53, -53, -53, -53, -53, -53, + -53, -53, -53, -53, -53, -53, -53, -53, -53, 1120, + -1000, -1000, 102, 326, 1077, 1077, 1077, 1077, 1077, 1077, + 279, -58, -1000, 196, 196, 600, -1000, 30, 321, 105, + -15, -1000, 157, 150, 1077, 400, -1000, -1000, 327, 335, + -1000, -1000, 436, -1000, 216, -1000, 214, 516, 776, -1000, + -47, -51, -41, -1000, 776, 776, 776, 776, 776, 776, + 776, 776, 776, 776, 776, 776, 776, 776, 776, -1000, + -1000, -1000, 1127, 272, 268, 264, 5, -1000, -1000, 1077, + -1000, 236, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 269, + 269, 176, -1000, 5, -1000, 1077, 228, 215, 233, 233, + -15, -15, -15, -15, -1000, -1000, -1000, 512, -1000, -1000, + 91, -1000, 996, -1000, -1000, -1000, -1000, 402, -1000, 404, + -1000, 162, -1000, -1000, -1000, -1000, -1000, 155, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 23, 66, 33, -1000, -1000, + -1000, 514, 167, 171, 171, 171, 196, 196, 196, 196, + 105, 105, 1133, 1133, 1133, 1067, 1017, 1133, 1133, 1067, + 105, 105, 1133, 105, 167, -1000, 212, 209, 195, 1077, + -15, 110, 81, 1077, 321, 46, -1000, -1000, -1000, 1070, + -1000, 163, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 776, 1077, -1000, -1000, -1000, -1000, + -1000, -1000, 36, 36, 22, 36, 83, 83, 98, 49, + -1000, -1000, 366, 364, 360, 353, 338, 334, 331, 305, + 303, 301, 299, -1000, 291, -67, -65, -1000, -1000, -1000, + -1000, -1000, 42, 34, 1077, 312, -1000, -1000, 240, -1000, + 
112, -1000, -1000, -1000, 424, -1000, 996, 193, -1000, -1000, + -1000, 36, -1000, 19, 17, 599, -1000, -1000, -1000, 77, + 289, 289, 289, 269, 217, 217, 77, 217, 77, -71, + 32, 229, 171, 171, -1000, -1000, 53, -1000, 1077, -1000, + -1000, -1000, -1000, -1000, -1000, 36, 36, -1000, -1000, -1000, + 36, -1000, -1000, -1000, -1000, -1000, -1000, 289, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 29, -1000, + -1000, 1077, 180, -1000, -1000, -1000, 336, -1000, -1000, 147, + -1000, 44, -1000, -1000, -1000, -1000, -1000, } var yyPgo = [...]int16{ - 0, 478, 13, 463, 6, 15, 462, 371, 22, 458, - 9, 457, 14, 292, 378, 454, 16, 448, 19, 12, - 447, 443, 7, 439, 4, 5, 436, 3, 2, 10, - 435, 21, 1, 434, 433, 26, 204, 432, 422, 88, - 409, 407, 28, 406, 41, 405, 11, 403, 402, 387, - 385, 384, 379, 376, 373, 340, 0, 358, 8, 357, - 350, 342, + 0, 539, 12, 536, 7, 16, 533, 431, 22, 529, + 10, 527, 24, 351, 380, 526, 15, 523, 19, 14, + 522, 516, 8, 515, 4, 5, 501, 3, 6, 13, + 500, 26, 2, 485, 484, 23, 208, 482, 481, 479, + 93, 478, 477, 27, 476, 1, 42, 469, 11, 466, + 453, 451, 445, 439, 427, 425, 418, 385, 0, 408, + 9, 396, 395, 388, } var yyR1 = [...]int8{ - 0, 60, 60, 60, 60, 60, 60, 60, 39, 39, - 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, - 39, 39, 39, 34, 34, 34, 34, 35, 35, 37, - 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, - 37, 37, 37, 37, 37, 36, 38, 38, 50, 50, - 43, 43, 43, 43, 18, 18, 18, 18, 17, 17, - 17, 4, 4, 4, 40, 42, 42, 41, 41, 41, - 51, 58, 47, 47, 48, 49, 33, 33, 33, 9, - 9, 45, 53, 53, 53, 53, 53, 53, 54, 55, - 55, 55, 44, 44, 44, 1, 1, 1, 2, 2, - 2, 2, 2, 2, 2, 14, 14, 7, 7, 7, + 0, 62, 62, 62, 62, 62, 62, 62, 40, 40, + 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, + 40, 40, 40, 34, 34, 34, 34, 35, 35, 38, + 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, + 38, 38, 38, 38, 38, 36, 39, 39, 52, 52, + 44, 44, 44, 44, 37, 37, 37, 37, 37, 37, + 18, 18, 18, 18, 17, 17, 17, 4, 4, 4, + 45, 45, 41, 43, 43, 42, 42, 42, 53, 60, + 49, 49, 50, 51, 33, 33, 33, 9, 9, 47, + 55, 
55, 55, 55, 55, 55, 56, 57, 57, 57, + 46, 46, 46, 1, 1, 1, 2, 2, 2, 2, + 2, 2, 2, 14, 14, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 13, 13, 13, 13, 15, - 15, 15, 16, 16, 16, 16, 16, 16, 16, 61, - 21, 21, 21, 21, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 30, 30, 30, 22, 22, 22, 22, - 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, - 24, 24, 24, 24, 25, 25, 26, 26, 26, 11, - 11, 11, 11, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 6, 6, 6, + 7, 7, 7, 7, 7, 7, 13, 13, 13, 13, + 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, + 63, 21, 21, 21, 21, 20, 20, 20, 20, 20, + 20, 20, 20, 20, 30, 30, 30, 22, 22, 22, + 22, 23, 23, 23, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 24, 25, 25, 26, 26, 26, + 11, 11, 11, 11, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 8, 8, 5, 5, 5, 5, 46, 46, 29, 29, - 31, 31, 32, 32, 28, 27, 27, 52, 10, 19, - 19, 59, 59, 59, 59, 59, 59, 59, 59, 59, - 59, 12, 12, 56, 56, 56, 56, 56, 56, 56, - 56, 56, 56, 56, 56, 57, + 6, 6, 6, 6, 8, 8, 5, 5, 5, 5, + 48, 48, 29, 29, 31, 31, 32, 32, 28, 27, + 27, 54, 10, 19, 19, 61, 61, 61, 61, 61, + 61, 61, 61, 61, 61, 12, 12, 58, 58, 58, + 58, 58, 58, 58, 58, 58, 58, 58, 58, 59, } var yyR2 = [...]int8{ @@ -636,126 +669,131 @@ var yyR2 = [...]int8{ 1, 1, 1, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 0, 1, 3, 3, - 1, 1, 3, 3, 3, 4, 2, 1, 3, 1, - 2, 1, 1, 1, 2, 3, 2, 3, 1, 2, - 3, 1, 3, 3, 2, 2, 3, 5, 3, 1, - 1, 4, 6, 5, 6, 5, 4, 3, 2, 2, - 1, 1, 3, 4, 2, 3, 1, 2, 3, 3, - 1, 3, 3, 2, 1, 2, 1, 1, 1, 1, + 1, 1, 3, 3, 1, 3, 3, 3, 5, 5, + 3, 4, 2, 1, 3, 1, 2, 1, 1, 1, + 3, 4, 2, 3, 2, 3, 1, 2, 3, 1, + 3, 3, 2, 2, 3, 5, 3, 1, 1, 4, + 6, 5, 6, 5, 4, 3, 2, 2, 1, 1, + 3, 4, 2, 3, 1, 2, 3, 3, 1, 3, + 3, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 3, 4, 2, 0, 3, - 1, 2, 3, 3, 1, 3, 3, 2, 1, 2, - 0, 3, 2, 1, 1, 3, 1, 3, 4, 
1, - 3, 5, 5, 1, 1, 1, 4, 3, 3, 2, - 3, 1, 2, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 4, 3, 3, 1, 2, 1, + 1, 1, 1, 1, 1, 1, 3, 4, 2, 0, + 3, 1, 2, 3, 3, 1, 3, 3, 2, 1, + 2, 0, 3, 2, 1, 1, 3, 1, 3, 4, + 1, 3, 5, 5, 1, 1, 1, 4, 3, 3, + 2, 3, 1, 2, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 4, 3, 3, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 2, 1, 1, 1, 2, 1, 1, 1, 0, - 1, 1, 2, 3, 3, 4, 4, 6, 7, 4, - 1, 1, 1, 1, 2, 3, 3, 3, 3, 3, - 3, 3, 3, 6, 1, 3, + 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, + 1, 1, 1, 0, 1, 1, 2, 3, 3, 4, + 4, 6, 7, 4, 1, 1, 1, 1, 2, 3, + 3, 3, 3, 3, 3, 3, 3, 6, 1, 3, } var yyChk = [...]int16{ - -1000, -60, 102, 103, 104, 105, 2, 10, -14, -7, - -13, 62, 63, 79, 64, 65, 66, 12, 47, 48, - 51, 67, 18, 68, 83, 69, 70, 71, 72, 73, - 87, 90, 91, 74, 75, 92, 93, 85, 84, 13, - -61, -14, 10, -39, -34, -37, -40, -45, -46, -47, - -48, -49, -51, -52, -53, -54, -55, -33, -56, -3, - 12, 19, 9, 15, 25, -8, -7, -44, 92, 93, - -12, -57, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 41, 57, 13, -55, - -13, -15, 20, -16, 12, -10, 2, 25, -21, 2, - 41, 59, 42, 43, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 56, 57, 83, 85, 84, 58, - 14, 41, 57, 53, 42, 52, 56, -35, -42, 2, - 79, 87, 15, -42, -39, -56, -39, -56, -44, 15, - 15, 15, -1, 20, -2, 12, -10, 2, 20, 7, - 2, 4, 2, 4, 24, -36, -43, -38, -50, 78, - -36, -36, -36, -36, -36, -36, -36, -36, -36, -36, - -36, -36, -36, -36, -36, -59, 2, -46, -8, 92, - 93, -12, -56, 68, 67, 15, -32, -9, 2, -29, - -31, 90, 91, 19, 9, 41, 57, -58, 2, -56, - -46, -8, 92, 93, -56, -56, -56, -56, -56, -56, - -42, -35, -18, 15, 2, -18, -41, 22, -39, 22, - 22, 22, 22, -56, 20, 7, 2, -5, 2, 4, - 54, 44, 55, -5, 20, -16, 25, 2, 25, 2, - -20, 5, -30, -22, 12, -29, -31, 16, -39, 82, - 86, 80, 81, -39, -39, -39, -39, -39, -39, -39, - -39, -39, -39, -39, -39, -39, -39, -39, -46, 92, - 93, -12, 15, -56, 15, 15, 15, -56, 
15, -29, - -29, 21, 6, 2, -17, 22, -4, -6, 25, 2, - 62, 78, 63, 79, 64, 65, 66, 80, 81, 12, - 82, 47, 48, 51, 67, 18, 68, 83, 86, 69, - 70, 71, 72, 73, 90, 91, 59, 74, 75, 92, - 93, 85, 84, 22, 7, 7, 20, -2, 25, 2, + -1000, -62, 105, 106, 107, 108, 2, 10, -14, -7, + -13, 62, 63, 79, 64, 65, 82, 83, 84, 66, + 12, 47, 48, 51, 67, 18, 68, 86, 69, 70, + 71, 72, 73, 90, 93, 94, 74, 75, 95, 96, + 88, 87, 13, -63, -14, 10, -40, -34, -38, -41, + -47, -48, -49, -50, -51, -53, -54, -55, -56, -57, + -33, -58, -3, 12, 19, 9, 15, 25, -8, -7, + -46, 95, 96, -12, -59, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 41, + 57, 13, -57, -13, -15, 20, -16, 12, -10, 2, + 25, -21, 2, 41, 59, 42, 43, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 56, 57, 86, + 88, 87, 58, 14, 41, 57, 53, 42, 52, 56, + -35, -43, 2, 79, 90, 15, -43, -40, -58, -40, + -58, -46, 15, 15, 15, -1, 20, -2, 12, -10, + 2, 20, 7, 2, 4, 2, 4, 24, -36, -37, + -44, -39, -52, 78, -36, -36, -36, -36, -36, -36, + -36, -36, -36, -36, -36, -36, -36, -36, -36, -61, + 2, -48, -8, 95, 96, -12, -58, 68, 67, 15, + -32, -9, 2, -29, -31, 93, 94, 19, 9, 41, + 57, -60, 2, -58, -48, -8, 95, 96, -58, -58, + -58, -58, -58, -58, -43, -35, -18, 15, 2, -18, + -42, 22, -40, 22, 22, 22, 22, -58, 20, 7, + 2, -5, 2, 4, 54, 44, 55, -5, 20, -16, + 25, 2, 25, 2, -20, 5, -30, -22, 12, -29, + -31, 16, -40, 82, 83, 84, 85, 89, 80, 81, + -40, -40, -40, -40, -40, -40, -40, -40, -40, -40, + -40, -40, -40, -40, -40, -48, 95, 96, -12, 15, + -58, 15, 15, 15, -58, 15, -29, -29, 21, 6, + 2, -17, 22, -4, -6, 25, 2, 62, 78, 63, + 79, 64, 65, 66, 80, 81, 82, 83, 84, 12, + 85, 47, 48, 51, 67, 18, 68, 86, 89, 69, + 70, 71, 72, 73, 93, 94, 59, 74, 75, 95, + 96, 88, 87, 22, 7, 7, 20, -2, 25, 2, 25, 2, 26, 26, -31, 26, 41, 57, -23, 24, 17, -24, 30, 28, 29, 35, 36, 37, 33, 31, - 34, 32, 38, -18, -18, -19, -18, -19, 15, 15, - 15, -56, 22, 22, -56, 22, -58, 21, 2, 22, - 7, 2, -39, -56, -28, 19, -28, 26, -28, -22, - -22, 24, 17, 2, 17, 6, 6, 6, 6, 6, - 6, 6, 6, 
6, 6, 6, 22, 22, -56, 22, - 7, 21, 2, 22, -4, 22, -28, 26, 26, 17, - -24, -27, 57, -28, -32, -32, -32, -29, -25, 14, - -25, -27, -25, -27, -11, 96, 97, 98, 99, 7, - -56, -28, -28, -28, -26, -32, -56, 22, 24, 21, - 2, 22, 21, -32, + 34, 32, 38, -45, 15, -45, -45, -18, -18, -19, + -18, -19, 15, 15, 15, -58, 22, 22, -58, 22, + -60, 21, 2, 22, 7, 2, -40, -58, -28, 19, + -28, 26, -28, -22, -22, 24, 17, 2, 17, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + -48, -8, 84, 83, 22, 22, -58, 22, 7, 21, + 2, 22, -4, 22, -28, 26, 26, 17, -24, -27, + 57, -28, -32, -32, -32, -29, -25, 14, -25, -27, + -25, -27, -11, 99, 100, 101, 102, 22, -48, -45, + -45, 7, -58, -28, -28, -28, -26, -32, 22, -58, + 22, 24, 21, 2, 22, 21, -32, } var yyDef = [...]int16{ - 0, -2, 138, 138, 0, 0, 7, 6, 1, 138, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 0, - 2, -2, 3, 4, 8, 9, 10, 11, 12, 13, - 14, 15, 16, 17, 18, 19, 20, 21, 22, 0, - 113, 246, 247, 0, 257, 0, 90, 91, 131, 132, - 0, 284, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, -2, -2, -2, -2, 240, 241, 0, 5, - 105, 0, 137, 140, 0, 144, 148, 258, 149, 153, - 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, - 46, 46, 46, 46, 46, 46, 0, 74, 75, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 25, 26, - 0, 0, 0, 64, 0, 22, 88, -2, 89, 0, - 0, 0, 0, 94, 96, 0, 100, 104, 135, 0, - 141, 0, 147, 0, 152, 0, 45, 50, 51, 47, + 0, -2, 149, 149, 0, 0, 7, 6, 1, 149, + 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, + 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, + 144, 145, 0, 2, -2, 3, 4, 8, 9, 10, + 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 0, 124, 260, 261, 0, 271, 0, 98, + 99, 142, 143, 0, 298, -2, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, -2, -2, -2, 254, + 255, 0, 5, 113, 0, 148, 151, 0, 155, 159, + 272, 160, 164, 46, 46, 46, 46, 46, 46, 46, + 46, 46, 46, 46, 46, 46, 46, 46, 46, 0, + 82, 83, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 25, 
26, 0, 0, 0, 72, 0, 22, 96, + -2, 97, 0, 0, 0, 0, 102, 104, 0, 108, + 112, 146, 0, 152, 0, 158, 0, 163, 0, 45, + 54, 50, 51, 47, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, + 81, 275, 0, 0, 0, 0, 284, 285, 286, 0, + 84, 0, 86, 266, 267, 87, 88, 262, 263, 0, + 0, 0, 95, 79, 287, 0, 0, 0, 289, 290, + 291, 292, 293, 294, 23, 24, 27, 0, 63, 28, + 0, 74, 76, 78, 299, 295, 296, 0, 100, 0, + 105, 0, 111, 256, 257, 258, 259, 0, 147, 150, + 153, 156, 154, 157, 162, 165, 167, 170, 174, 175, + 176, 0, 29, 0, 0, 0, 0, 0, -2, -2, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, 44, 276, 0, 0, 0, 0, + 288, 0, 0, 0, 0, 0, 264, 265, 89, 0, + 94, 0, 62, 65, 67, 68, 69, 218, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, + 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 73, 77, 0, 101, 103, 106, 110, + 107, 109, 0, 0, 0, 0, 0, 0, 0, 0, + 180, 182, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 55, 0, 56, 57, 48, 49, 52, + 274, 53, 0, 0, 0, 0, 277, 278, 0, 85, + 0, 91, 93, 60, 0, 66, 75, 0, 166, 268, + 168, 0, 171, 0, 0, 0, 178, 183, 179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 72, 73, 261, 0, 0, - 0, 0, 270, 271, 272, 0, 76, 0, 78, 252, - 253, 79, 80, 248, 249, 0, 0, 0, 87, 71, - 273, 0, 0, 0, 275, 276, 277, 278, 279, 280, - 23, 24, 27, 0, 57, 28, 0, 66, 68, 70, - 285, 281, 282, 0, 92, 0, 97, 0, 103, 242, - 243, 244, 245, 0, 136, 139, 142, 145, 143, 146, - 151, 154, 156, 159, 163, 164, 165, 0, 29, 0, - 0, -2, -2, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 262, 0, - 0, 0, 0, 274, 0, 0, 0, 0, 0, 250, - 251, 81, 0, 86, 0, 56, 59, 61, 62, 63, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 65, 69, 0, 93, 95, 98, 102, - 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, - 169, 171, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 48, 49, 52, 260, 53, 0, 0, - 0, 0, 263, 264, 0, 
77, 0, 83, 85, 54, - 0, 60, 67, 0, 155, 254, 157, 0, 160, 0, - 0, 0, 167, 172, 168, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 265, 266, 0, 269, - 0, 82, 84, 55, 58, 283, 158, 0, 0, 166, - 170, 173, 0, 256, 174, 175, 176, 177, 178, 0, - 179, 180, 181, 182, 183, 189, 190, 191, 192, 0, - 0, 161, 162, 255, 0, 187, 0, 267, 0, 185, - 188, 268, 184, 186, + 0, 0, 0, 0, 279, 280, 0, 283, 0, 90, + 92, 61, 64, 297, 169, 0, 0, 177, 181, 184, + 0, 270, 185, 186, 187, 188, 189, 0, 190, 191, + 192, 193, 194, 200, 201, 202, 203, 70, 0, 58, + 59, 0, 0, 172, 173, 269, 0, 198, 71, 0, + 281, 0, 196, 199, 282, 195, 197, } var yyTok1 = [...]int8{ @@ -773,7 +811,7 @@ var yyTok2 = [...]int8{ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, + 102, 103, 104, 105, 106, 107, 108, 109, } var yyTok3 = [...]int8{ @@ -1298,44 +1336,83 @@ yydefault: yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardOneToMany yyVAL.node.(*BinaryExpr).VectorMatching.Include = yyDollar[3].strings } - case 54: + case 55: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.node = yyDollar[1].node + fill := yyDollar[3].node.(*NumberLiteral).Val + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill + } + case 56: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.node = yyDollar[1].node + fill := yyDollar[3].node.(*NumberLiteral).Val + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill + } + case 57: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.node = yyDollar[1].node + fill := yyDollar[3].node.(*NumberLiteral).Val + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill + } + case 58: + yyDollar = yyS[yypt-5 : yypt+1] + { + yyVAL.node = yyDollar[1].node + fill_left := yyDollar[3].node.(*NumberLiteral).Val + fill_right := yyDollar[5].node.(*NumberLiteral).Val + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left + 
yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right + } + case 59: + yyDollar = yyS[yypt-5 : yypt+1] + { + fill_right := yyDollar[3].node.(*NumberLiteral).Val + fill_left := yyDollar[5].node.(*NumberLiteral).Val + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left + yyVAL.node.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right + } + case 60: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.strings = yyDollar[2].strings } - case 55: + case 61: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.strings = yyDollar[2].strings } - case 56: + case 62: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.strings = []string{} } - case 57: + case 63: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "\"(\"") yyVAL.strings = nil } - case 58: + case 64: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.strings = append(yyDollar[1].strings, yyDollar[3].item.Val) } - case 59: + case 65: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.strings = []string{yyDollar[1].item.Val} } - case 60: + case 66: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "\",\" or \")\"") yyVAL.strings = yyDollar[1].strings } - case 61: + case 67: yyDollar = yyS[yypt-1 : yypt+1] { if !model.UTF8Validation.IsValidLabelName(yyDollar[1].item.Val) { @@ -1343,7 +1420,7 @@ yydefault: } yyVAL.item = yyDollar[1].item } - case 62: + case 68: yyDollar = yyS[yypt-1 : yypt+1] { unquoted := yylex.(*parser).unquoteString(yyDollar[1].item.Val) @@ -1354,13 +1431,28 @@ yydefault: yyVAL.item.Pos++ yyVAL.item.Val = unquoted } - case 63: + case 69: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "label") yyVAL.item = Item{} } - case 64: + case 70: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.node = yyDollar[2].node.(*NumberLiteral) + } + case 71: + yyDollar = yyS[yypt-4 : yypt+1] + { + nl := yyDollar[3].node.(*NumberLiteral) + if yyDollar[2].item.Typ == SUB { + nl.Val *= -1 + } + nl.PosRange.Start = yyDollar[2].item.Pos + yyVAL.node = nl + } + 
case 72: yyDollar = yyS[yypt-2 : yypt+1] { fn, exist := getFunction(yyDollar[1].item.Val, yylex.(*parser).functions) @@ -1379,38 +1471,38 @@ yydefault: }, } } - case 65: + case 73: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[2].node } - case 66: + case 74: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = Expressions{} } - case 67: + case 75: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = append(yyDollar[1].node.(Expressions), yyDollar[3].node.(Expr)) } - case 68: + case 76: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = Expressions{yyDollar[1].node.(Expr)} } - case 69: + case 77: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "trailing commas not allowed in function call args") yyVAL.node = yyDollar[1].node } - case 70: + case 78: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &ParenExpr{Expr: yyDollar[2].node.(Expr), PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item)} } - case 71: + case 79: yyDollar = yyS[yypt-1 : yypt+1] { if numLit, ok := yyDollar[1].node.(*NumberLiteral); ok { @@ -1424,7 +1516,7 @@ yydefault: } yyVAL.node = yyDollar[1].node } - case 72: + case 80: yyDollar = yyS[yypt-3 : yypt+1] { if numLit, ok := yyDollar[3].node.(*NumberLiteral); ok { @@ -1435,41 +1527,41 @@ yydefault: yylex.(*parser).addOffsetExpr(yyDollar[1].node, yyDollar[3].node.(*DurationExpr)) yyVAL.node = yyDollar[1].node } - case 73: + case 81: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("offset", "number, duration, step(), or range()") yyVAL.node = yyDollar[1].node } - case 74: + case 82: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).setAnchored(yyDollar[1].node) } - case 75: + case 83: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).setSmoothed(yyDollar[1].node) } - case 76: + case 84: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).setTimestamp(yyDollar[1].node, yyDollar[3].float) yyVAL.node = yyDollar[1].node } - case 77: + case 85: yyDollar = yyS[yypt-5 : yypt+1] { 
yylex.(*parser).setAtModifierPreprocessor(yyDollar[1].node, yyDollar[3].item) yyVAL.node = yyDollar[1].node } - case 78: + case 86: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("@", "timestamp") yyVAL.node = yyDollar[1].node } - case 81: + case 89: yyDollar = yyS[yypt-4 : yypt+1] { var errMsg string @@ -1499,7 +1591,7 @@ yydefault: EndPos: yylex.(*parser).lastClosing, } } - case 82: + case 90: yyDollar = yyS[yypt-6 : yypt+1] { var rangeNl time.Duration @@ -1521,7 +1613,7 @@ yydefault: EndPos: yyDollar[6].item.Pos + 1, } } - case 83: + case 91: yyDollar = yyS[yypt-5 : yypt+1] { var rangeNl time.Duration @@ -1536,31 +1628,31 @@ yydefault: EndPos: yyDollar[5].item.Pos + 1, } } - case 84: + case 92: yyDollar = yyS[yypt-6 : yypt+1] { yylex.(*parser).unexpected("subquery selector", "\"]\"") yyVAL.node = yyDollar[1].node } - case 85: + case 93: yyDollar = yyS[yypt-5 : yypt+1] { yylex.(*parser).unexpected("subquery selector", "number, duration, step(), range(), or \"]\"") yyVAL.node = yyDollar[1].node } - case 86: + case 94: yyDollar = yyS[yypt-4 : yypt+1] { yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\"") yyVAL.node = yyDollar[1].node } - case 87: + case 95: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("subquery or range selector", "number, duration, step(), or range()") yyVAL.node = yyDollar[1].node } - case 88: + case 96: yyDollar = yyS[yypt-2 : yypt+1] { if nl, ok := yyDollar[2].node.(*NumberLiteral); ok { @@ -1573,7 +1665,7 @@ yydefault: yyVAL.node = &UnaryExpr{Op: yyDollar[1].item.Typ, Expr: yyDollar[2].node.(Expr), StartPos: yyDollar[1].item.Pos} } } - case 89: + case 97: yyDollar = yyS[yypt-2 : yypt+1] { vs := yyDollar[2].node.(*VectorSelector) @@ -1582,7 +1674,7 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 90: + case 98: yyDollar = yyS[yypt-1 : yypt+1] { vs := &VectorSelector{ @@ -1593,14 +1685,14 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 
91: + case 99: yyDollar = yyS[yypt-1 : yypt+1] { vs := yyDollar[1].node.(*VectorSelector) yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 92: + case 100: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1608,7 +1700,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item), } } - case 93: + case 101: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1616,7 +1708,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[4].item), } } - case 94: + case 102: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1624,7 +1716,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[2].item), } } - case 95: + case 103: yyDollar = yyS[yypt-3 : yypt+1] { if yyDollar[1].matchers != nil { @@ -1633,144 +1725,144 @@ yydefault: yyVAL.matchers = yyDollar[1].matchers } } - case 96: + case 104: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.matchers = []*labels.Matcher{yyDollar[1].matcher} } - case 97: + case 105: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label matching", "\",\" or \"}\"") yyVAL.matchers = yyDollar[1].matchers } - case 98: + case 106: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) } - case 99: + case 107: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) } - case 100: + case 108: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.matcher = yylex.(*parser).newMetricNameMatcher(yyDollar[1].item) } - case 101: + case 109: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("label matching", "string") yyVAL.matcher = nil } - case 102: + case 110: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("label matching", "string") yyVAL.matcher = nil } - case 103: + case 111: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label matching", "label matching 
operator") yyVAL.matcher = nil } - case 104: + case 112: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("label matching", "identifier or \"}\"") yyVAL.matcher = nil } - case 105: + case 113: yyDollar = yyS[yypt-2 : yypt+1] { b := labels.NewBuilder(yyDollar[2].labels) b.Set(labels.MetricName, yyDollar[1].item.Val) yyVAL.labels = b.Labels() } - case 106: + case 114: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.labels = yyDollar[1].labels } - case 135: + case 146: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.labels = labels.New(yyDollar[2].lblList...) } - case 136: + case 147: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.labels = labels.New(yyDollar[2].lblList...) } - case 137: + case 148: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.labels = labels.New() } - case 138: + case 149: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.labels = labels.New() } - case 139: + case 150: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label) } - case 140: + case 151: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.lblList = []labels.Label{yyDollar[1].label} } - case 141: + case 152: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\",\" or \"}\"") yyVAL.lblList = yyDollar[1].lblList } - case 142: + case 153: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } - case 143: + case 154: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } - case 144: + case 155: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.label = labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val} } - case 145: + case 156: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("label set", "string") yyVAL.label = labels.Label{} } - case 146: + case 157: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("label set", "string") yyVAL.label = 
labels.Label{} } - case 147: + case 158: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\"=\"") yyVAL.label = labels.Label{} } - case 148: + case 159: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("label set", "identifier or \"}\"") yyVAL.label = labels.Label{} } - case 149: + case 160: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).generatedParserResult = &seriesDescription{ @@ -1778,33 +1870,33 @@ yydefault: values: yyDollar[2].series, } } - case 150: + case 161: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.series = []SequenceValue{} } - case 151: + case 162: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...) } - case 152: + case 163: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.series = yyDollar[1].series } - case 153: + case 164: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("series values", "") yyVAL.series = nil } - case 154: + case 165: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Omitted: true}} } - case 155: + case 166: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1812,12 +1904,12 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Omitted: true}) } } - case 156: + case 167: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}} } - case 157: + case 168: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1826,7 +1918,7 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float}) } } - case 158: + case 169: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1836,12 +1928,12 @@ yydefault: yyDollar[1].float += yyDollar[2].float } } - case 159: + case 170: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Histogram: yyDollar[1].histogram}} } - case 160: + case 171: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1851,7 +1943,7 @@ yydefault: //$1 += $2 } } - 
case 161: + case 172: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsIncreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1860,7 +1952,7 @@ yydefault: } yyVAL.series = val } - case 162: + case 173: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsDecreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1869,7 +1961,7 @@ yydefault: } yyVAL.series = val } - case 163: + case 174: yyDollar = yyS[yypt-1 : yypt+1] { if yyDollar[1].item.Val != "stale" { @@ -1877,130 +1969,130 @@ yydefault: } yyVAL.float = math.Float64frombits(value.StaleNaN) } - case 166: + case 177: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } - case 167: + case 178: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } - case 168: + case 179: yyDollar = yyS[yypt-3 : yypt+1] { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } - case 169: + case 180: yyDollar = yyS[yypt-2 : yypt+1] { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } - case 170: + case 181: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = *(yylex.(*parser).mergeMaps(&yyDollar[1].descriptors, &yyDollar[3].descriptors)) } - case 171: + case 182: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.descriptors = yyDollar[1].descriptors } - case 172: + case 183: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("histogram description", "histogram description key, e.g. 
buckets:[5 10 7]") } - case 173: + case 184: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["schema"] = yyDollar[3].int } - case 174: + case 185: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["sum"] = yyDollar[3].float } - case 175: + case 186: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["count"] = yyDollar[3].float } - case 176: + case 187: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket"] = yyDollar[3].float } - case 177: + case 188: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket_w"] = yyDollar[3].float } - case 178: + case 189: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["custom_values"] = yyDollar[3].bucket_set } - case 179: + case 190: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["buckets"] = yyDollar[3].bucket_set } - case 180: + case 191: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["offset"] = yyDollar[3].int } - case 181: + case 192: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_buckets"] = yyDollar[3].bucket_set } - case 182: + case 193: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_offset"] = yyDollar[3].int } - case 183: + case 194: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["counter_reset_hint"] = yyDollar[3].item } - case 184: + case 195: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.bucket_set = yyDollar[2].bucket_set } - case 185: + case 196: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.bucket_set = yyDollar[2].bucket_set } - case 186: + case 197: yyDollar = yyS[yypt-3 : yypt+1] 
{ yyVAL.bucket_set = append(yyDollar[1].bucket_set, yyDollar[3].float) } - case 187: + case 198: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.bucket_set = []float64{yyDollar[1].float} } - case 246: + case 260: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &NumberLiteral{ @@ -2008,7 +2100,7 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 247: + case 261: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -2023,12 +2115,12 @@ yydefault: Duration: true, } } - case 248: + case 262: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val) } - case 249: + case 263: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -2039,17 +2131,17 @@ yydefault: } yyVAL.float = dur.Seconds() } - case 250: + case 264: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = yyDollar[2].float } - case 251: + case 265: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = -yyDollar[2].float } - case 254: + case 268: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -2058,17 +2150,17 @@ yydefault: yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "invalid repetition in series values: %s", err) } } - case 255: + case 269: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.int = -int64(yyDollar[2].uint) } - case 256: + case 270: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.int = int64(yyDollar[1].uint) } - case 257: + case 271: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &StringLiteral{ @@ -2076,7 +2168,7 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 258: + case 272: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.item = Item{ @@ -2085,12 +2177,12 @@ yydefault: Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val), } } - case 259: + case 273: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.strings = nil } - case 261: + case 275: yyDollar = yyS[yypt-1 : yypt+1] { nl := yyDollar[1].node.(*NumberLiteral) @@ -2101,7 +2193,7 @@ yydefault: } yyVAL.node = nl } - case 262: + case 276: yyDollar = yyS[yypt-2 : yypt+1] { nl := 
yyDollar[2].node.(*NumberLiteral) @@ -2116,7 +2208,7 @@ yydefault: nl.PosRange.Start = yyDollar[1].item.Pos yyVAL.node = nl } - case 263: + case 277: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2125,7 +2217,7 @@ yydefault: EndPos: yyDollar[3].item.PositionRange().End, } } - case 264: + case 278: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2134,7 +2226,7 @@ yydefault: EndPos: yyDollar[3].item.PositionRange().End, } } - case 265: + case 279: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2147,7 +2239,7 @@ yydefault: StartPos: yyDollar[1].item.Pos, } } - case 266: + case 280: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2160,7 +2252,7 @@ yydefault: StartPos: yyDollar[1].item.Pos, } } - case 267: + case 281: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2171,7 +2263,7 @@ yydefault: RHS: yyDollar[5].node.(Expr), } } - case 268: + case 282: yyDollar = yyS[yypt-7 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2187,7 +2279,7 @@ yydefault: }, } } - case 269: + case 283: yyDollar = yyS[yypt-4 : yypt+1] { de := yyDollar[3].node.(*DurationExpr) @@ -2202,7 +2294,7 @@ yydefault: } yyVAL.node = yyDollar[3].node } - case 273: + case 287: yyDollar = yyS[yypt-1 : yypt+1] { nl := yyDollar[1].node.(*NumberLiteral) @@ -2213,7 +2305,7 @@ yydefault: } yyVAL.node = nl } - case 274: + case 288: yyDollar = yyS[yypt-2 : yypt+1] { switch expr := yyDollar[2].node.(type) { @@ -2246,25 +2338,25 @@ yydefault: break } } - case 275: + case 289: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) yyVAL.node = &DurationExpr{Op: ADD, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 276: + case 290: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) yyVAL.node = &DurationExpr{Op: SUB, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 277: + case 291: yyDollar = 
yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) yyVAL.node = &DurationExpr{Op: MUL, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 278: + case 292: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) @@ -2275,7 +2367,7 @@ yydefault: } yyVAL.node = &DurationExpr{Op: DIV, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 279: + case 293: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) @@ -2286,13 +2378,13 @@ yydefault: } yyVAL.node = &DurationExpr{Op: MOD, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 280: + case 294: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) yyVAL.node = &DurationExpr{Op: POW, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} } - case 281: + case 295: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2301,7 +2393,7 @@ yydefault: EndPos: yyDollar[3].item.PositionRange().End, } } - case 282: + case 296: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2310,7 +2402,7 @@ yydefault: EndPos: yyDollar[3].item.PositionRange().End, } } - case 283: + case 297: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.node = &DurationExpr{ @@ -2321,7 +2413,7 @@ yydefault: RHS: yyDollar[5].node.(Expr), } } - case 285: + case 299: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).experimentalDurationExpr(yyDollar[2].node.(Expr)) diff --git a/promql/parser/lex.go b/promql/parser/lex.go index b3a82dc0c6..7149985767 100644 --- a/promql/parser/lex.go +++ b/promql/parser/lex.go @@ -137,6 +137,9 @@ var key = map[string]ItemType{ "ignoring": IGNORING, "group_left": GROUP_LEFT, "group_right": GROUP_RIGHT, + "fill": FILL, + "fill_left": FILL_LEFT, + "fill_right": FILL_RIGHT, "bool": BOOL, // Preprocessors. 
@@ -1083,6 +1086,17 @@ Loop: word := l.input[l.start:l.pos] switch kw, ok := key[strings.ToLower(word)]; { case ok: + // For fill/fill_left/fill_right, only treat as keyword if followed by '(' + // This allows using these as metric names (e.g., "fill + fill"). + // This could be done for other keywords as well, but for the new fill + // modifiers this is especially important so we don't break any existing + // queries. + if kw == FILL || kw == FILL_LEFT || kw == FILL_RIGHT { + if !l.peekFollowedByLeftParen() { + l.emit(IDENTIFIER) + break Loop + } + } l.emit(kw) case !strings.Contains(word, ":"): l.emit(IDENTIFIER) @@ -1098,6 +1112,23 @@ Loop: return lexStatements } +// peekFollowedByLeftParen checks if the next non-whitespace character is '('. +// This is used for context-sensitive keywords like fill/fill_left/fill_right +// that should only be treated as keywords when followed by '('. +func (l *Lexer) peekFollowedByLeftParen() bool { + pos := l.pos + for { + if int(pos) >= len(l.input) { + return false + } + r, w := utf8.DecodeRuneInString(l.input[pos:]) + if !isSpace(r) { + return r == '(' + } + pos += posrange.Pos(w) + } +} + func isSpace(r rune) bool { return r == ' ' || r == '\t' || r == '\n' || r == '\r' } diff --git a/promql/parser/parse.go b/promql/parser/parse.go index 817e0d02d9..a872706364 100644 --- a/promql/parser/parse.go +++ b/promql/parser/parse.go @@ -768,6 +768,9 @@ func (p *parser) checkAST(node Node) (typ ValueType) { if len(n.VectorMatching.MatchingLabels) > 0 { p.addParseErrf(n.PositionRange(), "vector matching only allowed between instant vectors") } + if n.VectorMatching.FillValues.LHS != nil || n.VectorMatching.FillValues.RHS != nil { + p.addParseErrf(n.PositionRange(), "filling in missing series only allowed between instant vectors") + } n.VectorMatching = nil case n.Op.IsSetOperator(): // Both operands are Vectors. 
if n.VectorMatching.Card == CardOneToMany || n.VectorMatching.Card == CardManyToOne { @@ -776,6 +779,9 @@ func (p *parser) checkAST(node Node) (typ ValueType) { if n.VectorMatching.Card != CardManyToMany { p.addParseErrf(n.PositionRange(), "set operations must always be many-to-many") } + if n.VectorMatching.FillValues.LHS != nil || n.VectorMatching.FillValues.RHS != nil { + p.addParseErrf(n.PositionRange(), "filling in missing series not allowed for set operators") + } } if (lt == ValueTypeScalar || rt == ValueTypeScalar) && n.Op.IsSetOperator() { diff --git a/promql/parser/printer.go b/promql/parser/printer.go index 01e2c46c1b..44ca15e532 100644 --- a/promql/parser/printer.go +++ b/promql/parser/printer.go @@ -172,6 +172,19 @@ func (node *BinaryExpr) getMatchingStr() string { b.WriteString(")") matching += b.String() } + + if vm.FillValues.LHS != nil || vm.FillValues.RHS != nil { + if vm.FillValues.LHS == vm.FillValues.RHS { + matching += fmt.Sprintf(" fill (%v)", *vm.FillValues.LHS) + } else { + if vm.FillValues.LHS != nil { + matching += fmt.Sprintf(" fill_left (%v)", *vm.FillValues.LHS) + } + if vm.FillValues.RHS != nil { + matching += fmt.Sprintf(" fill_right (%v)", *vm.FillValues.RHS) + } + } + } } return matching } diff --git a/promql/parser/printer_test.go b/promql/parser/printer_test.go index 4499fa7860..a5f254527e 100644 --- a/promql/parser/printer_test.go +++ b/promql/parser/printer_test.go @@ -113,6 +113,26 @@ func TestExprString(t *testing.T) { in: `a - ignoring() group_left c`, out: `a - ignoring () group_left () c`, }, + { + in: `a + fill(-23) b`, + out: `a + fill (-23) b`, + }, + { + in: `a + fill_left(-23) b`, + out: `a + fill_left (-23) b`, + }, + { + in: `a + fill_right(42) b`, + out: `a + fill_right (42) b`, + }, + { + in: `a + fill_left(-23) fill_right(42) b`, + out: `a + fill_left (-23) fill_right (42) b`, + }, + { + in: `a + on(b) group_left fill(-23) c`, + out: `a + on (b) group_left () fill (-23) c`, + }, { in: `up > bool 0`, }, diff --git 
a/web/api/v1/translate_ast.go b/web/api/v1/translate_ast.go index 3cce0583f9..3c2bc09943 100644 --- a/web/api/v1/translate_ast.go +++ b/web/api/v1/translate_ast.go @@ -47,6 +47,10 @@ func translateAST(node parser.Expr) any { "labels": sanitizeList(m.MatchingLabels), "on": m.On, "include": sanitizeList(m.Include), + "fillValues": map[string]*float64{ + "lhs": m.FillValues.LHS, + "rhs": m.FillValues.RHS, + }, } } diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx index e70b7a3f3e..5c10357561 100644 --- a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx +++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx @@ -8,6 +8,7 @@ import { MatchErrorType, computeVectorVectorBinOp, filteredSampleValue, + MaybeFilledInstantSample, } from "../../../../promql/binOp"; import { formatNode, labelNameList } from "../../../../promql/format"; import { @@ -177,11 +178,10 @@ const explanationText = (node: BinaryExpr): React.ReactNode => { ) : ( - - group_{manySide}({labelNameList(matching.include)}) - - : {matching.card} match. Each series from the {oneSide}-hand side is - allowed to match with multiple series from the {manySide}-hand side. + group_{manySide} + ({labelNameList(matching.include)}) : {matching.card} match. Each + series from the {oneSide}-hand side is allowed to match with + multiple series from the {manySide}-hand side. {matching.include.length !== 0 && ( <> {" "} @@ -192,6 +192,55 @@ const explanationText = (node: BinaryExpr): React.ReactNode => { )} )} + {(matching.fillValues.lhs !== null || + matching.fillValues.rhs !== null) && + (matching.fillValues.lhs === matching.fillValues.rhs ? ( + + fill( + + {matching.fillValues.lhs} + + ) : For series on either side missing a match, fill in the sample + value{" "} + + {matching.fillValues.lhs} + + . 
+ + ) : ( + <> + {matching.fillValues.lhs !== null && ( + + fill_left( + + {matching.fillValues.lhs} + + ) : For series on the left-hand side missing a match, fill in + the sample value{" "} + + {matching.fillValues.lhs} + + . + + )} + + {matching.fillValues.rhs !== null && ( + + fill_right + ( + + {matching.fillValues.rhs} + + ) : For series on the right-hand side missing a match, fill in + the sample value{" "} + + {matching.fillValues.rhs} + + . + + )} + + ))} {node.bool && ( bool: Instead of @@ -239,7 +288,12 @@ const explainError = ( matching: { ...(binOp.matching ? binOp.matching - : { labels: [], on: false, include: [] }), + : { + labels: [], + on: false, + include: [], + fillValues: { lhs: null, rhs: null }, + }), card: err.dupeSide === "left" ? vectorMatchCardinality.manyToOne @@ -403,7 +457,7 @@ const VectorVectorBinaryExprExplainView: FC< ); const matchGroupTable = ( - series: InstantSample[], + series: MaybeFilledInstantSample[], seriesCount: number, color: string, colorOffset?: number @@ -458,6 +512,11 @@ const VectorVectorBinaryExprExplainView: FC< )} format={true} /> + {s.filled && ( + + no match, filling in default value + + )} {showSampleValues && ( diff --git a/web/ui/mantine-ui/src/promql/ast.ts b/web/ui/mantine-ui/src/promql/ast.ts index 94872c6db0..9f8c5cb102 100644 --- a/web/ui/mantine-ui/src/promql/ast.ts +++ b/web/ui/mantine-ui/src/promql/ast.ts @@ -104,11 +104,16 @@ export interface LabelMatcher { value: string; } +export interface FillValues { + lhs: number | null; + rhs: number | null; +} export interface VectorMatching { card: vectorMatchCardinality; labels: string[]; on: boolean; include: string[]; + fillValues: FillValues; } export type StartOrEnd = "start" | "end" | null; diff --git a/web/ui/mantine-ui/src/promql/binOp.test.ts b/web/ui/mantine-ui/src/promql/binOp.test.ts index 72ef16947b..9c5d59a94c 100644 --- a/web/ui/mantine-ui/src/promql/binOp.test.ts +++ b/web/ui/mantine-ui/src/promql/binOp.test.ts @@ -81,6 +81,7 @@ const 
testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -247,6 +248,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1", "label2"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -413,6 +415,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: ["same"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -579,6 +582,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricB, rhs: testMetricC, @@ -701,6 +705,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricB, rhs: testMetricC, @@ -791,6 +796,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricB, rhs: testMetricC, @@ -905,6 +911,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricC, rhs: testMetricB, @@ -1019,6 +1026,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricC, rhs: testMetricB, @@ -1107,6 +1115,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -1223,6 +1232,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -1409,6 +1419,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -1596,6 +1607,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: 
null }, }, lhs: testMetricA, rhs: testMetricB, @@ -1763,6 +1775,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -1929,6 +1942,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -2022,6 +2036,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricB, rhs: testMetricC, @@ -2105,6 +2120,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricB, rhs: testMetricC, @@ -2156,6 +2172,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA, rhs: testMetricB, @@ -2342,6 +2359,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA.slice(0, 3), rhs: testMetricB.slice(1, 4), @@ -2474,6 +2492,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA.slice(0, 3), rhs: testMetricB.slice(1, 4), @@ -2568,6 +2587,7 @@ const testCases: TestCase[] = [ on: true, include: [], labels: ["label1"], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA.slice(0, 3), rhs: testMetricB.slice(1, 4), @@ -2700,6 +2720,7 @@ const testCases: TestCase[] = [ on: false, include: [], labels: [], + fillValues: { lhs: null, rhs: null }, }, lhs: testMetricA.slice(0, 3), rhs: testMetricB.slice(1, 4), @@ -2886,6 +2907,7 @@ describe("binOp", () => { on: true, labels: ["label1"], include: [], + fillValues: { lhs: null, rhs: null }, }; const result = resultMetric(lhs, rhs, op, matching); @@ -2911,6 +2933,7 @@ describe("binOp", () => { on: true, labels: ["label1"], include: [], + fillValues: { lhs: 
null, rhs: null }, }; const result = resultMetric(lhs, rhs, op, matching); @@ -2931,6 +2954,7 @@ describe("binOp", () => { on: true, labels: ["label1"], include: ["label2"], + fillValues: { lhs: null, rhs: null }, }; const result = resultMetric(lhs, rhs, op, matching); diff --git a/web/ui/mantine-ui/src/promql/binOp.ts b/web/ui/mantine-ui/src/promql/binOp.ts index dbfa64be2c..f583bf81bb 100644 --- a/web/ui/mantine-ui/src/promql/binOp.ts +++ b/web/ui/mantine-ui/src/promql/binOp.ts @@ -45,13 +45,18 @@ export type VectorMatchError = | MultipleMatchesOnBothSidesError | MultipleMatchesOnOneSideError; +export type MaybeFilledInstantSample = InstantSample & { + // If the sample was filled in via a fill(...) modifier, this is true. + filled?: boolean; +}; + // A single match group as produced by a vector-to-vector binary operation, with all of its // left-hand side and right-hand side series, as well as a result and error, if applicable. export type BinOpMatchGroup = { groupLabels: Metric; - rhs: InstantSample[]; + rhs: MaybeFilledInstantSample[]; rhsCount: number; // Number of samples before applying limits. - lhs: InstantSample[]; + lhs: MaybeFilledInstantSample[]; lhsCount: number; // Number of samples before applying limits. result: { sample: InstantSample; @@ -338,6 +343,26 @@ export const computeVectorVectorBinOp = ( groups[sig].lhsCount++; }); + // Check for any LHS / RHS with no series and fill in default values, if specified. 
+ Object.values(groups).forEach((mg) => { + if (mg.lhs.length === 0 && matching.fillValues.lhs !== null) { + mg.lhs.push({ + metric: {}, + value: [0, formatPrometheusFloat(matching.fillValues.lhs as number)], + filled: true, + }); + mg.lhsCount = 1; + } + if (mg.rhs.length === 0 && matching.fillValues.rhs !== null) { + mg.rhs.push({ + metric: {}, + value: [0, formatPrometheusFloat(matching.fillValues.rhs as number)], + filled: true, + }); + mg.rhsCount = 1; + } + }); + // Annotate the match groups with errors (if any) and populate the results. Object.values(groups).forEach((mg) => { switch (matching.card) { diff --git a/web/ui/mantine-ui/src/promql/format.tsx b/web/ui/mantine-ui/src/promql/format.tsx index 75b1965b35..8602c65a82 100644 --- a/web/ui/mantine-ui/src/promql/format.tsx +++ b/web/ui/mantine-ui/src/promql/format.tsx @@ -265,6 +265,7 @@ const formatNodeInternal = ( case nodeType.binaryExpr: { let matching = <>; let grouping = <>; + let fill = <>; const vm = node.matching; if (vm !== null) { if ( @@ -305,6 +306,45 @@ const formatNodeInternal = ( ); } + + const lfill = vm.fillValues.lhs; + const rfill = vm.fillValues.rhs; + if (lfill !== null || rfill !== null) { + if (lfill === rfill) { + fill = ( + <> + {" "} + fill + ( + {lfill} + ) + + ); + } else { + fill = ( + <> + {lfill !== null && ( + <> + {" "} + fill_left + ( + {lfill} + ) + + )} + {rfill !== null && ( + <> + {" "} + fill_right + ( + {rfill} + ) + + )} + + ); + } + } } return ( @@ -327,7 +367,8 @@ const formatNodeInternal = ( )} {matching} - {grouping}{" "} + {grouping} + {fill}{" "} {showChildren && formatNode( maybeParenthesizeBinopChild(node.op, node.rhs), diff --git a/web/ui/mantine-ui/src/promql/serialize.ts b/web/ui/mantine-ui/src/promql/serialize.ts index 584e1ae9ff..50c32c49e4 100644 --- a/web/ui/mantine-ui/src/promql/serialize.ts +++ b/web/ui/mantine-ui/src/promql/serialize.ts @@ -135,6 +135,7 @@ const serializeNode = ( case nodeType.binaryExpr: { let matching = ""; let grouping = ""; + 
let fill = ""; const vm = node.matching; if (vm !== null) { if ( @@ -152,11 +153,26 @@ const serializeNode = ( ) { grouping = ` group_${vm.card === vectorMatchCardinality.manyToOne ? "left" : "right"}(${labelNameList(vm.include)})`; } + + const lfill = vm.fillValues.lhs; + const rfill = vm.fillValues.rhs; + if (lfill !== null || rfill !== null) { + if (lfill === rfill) { + fill = ` fill(${lfill})`; + } else { + if (lfill !== null) { + fill += ` fill_left(${lfill})`; + } + if (rfill !== null) { + fill += ` fill_right(${rfill})`; + } + } + } } return `${serializeNode(maybeParenthesizeBinopChild(node.op, node.lhs), childIndent, pretty)}${childSeparator}${ind}${ node.op - }${node.bool ? " bool" : ""}${matching}${grouping}${childSeparator}${serializeNode( + }${node.bool ? " bool" : ""}${matching}${grouping}${fill}${childSeparator}${serializeNode( maybeParenthesizeBinopChild(node.op, node.rhs), childIndent, pretty diff --git a/web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts b/web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts index a3734d311f..f9ff039882 100644 --- a/web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts +++ b/web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts @@ -658,6 +658,7 @@ describe("serializeNode and formatNode", () => { labels: [], on: false, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -677,6 +678,7 @@ describe("serializeNode and formatNode", () => { labels: [], on: true, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -696,6 +698,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -715,6 +718,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: false, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -735,6 +739,7 @@ describe("serializeNode and formatNode", () => { labels: [], on: false, include: 
[], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -755,6 +760,7 @@ describe("serializeNode and formatNode", () => { labels: [], on: false, include: ["__name__"], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -774,6 +780,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -793,6 +800,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: ["label3"], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -812,6 +820,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: [], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -831,6 +840,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: ["label3"], + fillValues: { lhs: null, rhs: null }, }, bool: false, }, @@ -864,6 +874,7 @@ describe("serializeNode and formatNode", () => { labels: ["label1", "label2"], on: true, include: ["label3"], + fillValues: { lhs: null, rhs: null }, }, bool: true, }, @@ -911,6 +922,7 @@ describe("serializeNode and formatNode", () => { include: ["c", "ü"], labels: ["b", "ö"], on: true, + fillValues: { lhs: null, rhs: null }, }, op: binaryOperatorType.div, rhs: { @@ -948,6 +960,7 @@ describe("serializeNode and formatNode", () => { include: [], labels: ["e", "ö"], on: false, + fillValues: { lhs: null, rhs: null }, }, op: binaryOperatorType.add, rhs: { diff --git a/web/ui/module/codemirror-promql/src/complete/promql.terms.ts b/web/ui/module/codemirror-promql/src/complete/promql.terms.ts index d356268d74..3670fffff7 100644 --- a/web/ui/module/codemirror-promql/src/complete/promql.terms.ts +++ b/web/ui/module/codemirror-promql/src/complete/promql.terms.ts @@ -39,6 +39,10 @@ export const binOpModifierTerms = [ { label: 'ignoring', info: 'Ignore specified labels for matching', type: 'keyword' 
}, { label: 'group_left', info: 'Allow many-to-one matching', type: 'keyword' }, { label: 'group_right', info: 'Allow one-to-many matching', type: 'keyword' }, + { label: 'bool', info: 'Return boolean result (0 or 1) instead of filtering', type: 'keyword' }, + { label: 'fill', info: 'Fill in missing series on both sides', type: 'keyword' }, + { label: 'fill_left', info: 'Fill in missing series on the left side', type: 'keyword' }, + { label: 'fill_right', info: 'Fill in missing series on the right side', type: 'keyword' }, ]; export const atModifierTerms = [ diff --git a/web/ui/module/codemirror-promql/src/parser/vector.test.ts b/web/ui/module/codemirror-promql/src/parser/vector.test.ts index f628206538..c6eeb930ab 100644 --- a/web/ui/module/codemirror-promql/src/parser/vector.test.ts +++ b/web/ui/module/codemirror-promql/src/parser/vector.test.ts @@ -15,29 +15,31 @@ import { buildVectorMatching } from './vector'; import { createEditorState } from '../test/utils-test'; import { BinaryExpr } from '@prometheus-io/lezer-promql'; import { syntaxTree } from '@codemirror/language'; -import { VectorMatchCardinality } from '../types'; +import { VectorMatchCardinality, VectorMatching } from '../types'; + +const noFill = { fill: { lhs: null, rhs: null } }; describe('buildVectorMatching test', () => { - const testCases = [ + const testCases: { binaryExpr: string; expectedVectorMatching: VectorMatching }[] = [ { binaryExpr: 'foo * bar', - expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] }, + expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill }, }, { binaryExpr: 'foo * sum', - expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] }, + expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill }, }, { binaryExpr: 'foo == 1', - 
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] }, + expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill }, }, { binaryExpr: 'foo == bool 1', - expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] }, + expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill }, }, { binaryExpr: '2.5 / bar', - expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] }, + expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill }, }, { binaryExpr: 'foo and bar', @@ -46,6 +48,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -55,6 +58,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -64,6 +68,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -75,6 +80,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -86,6 +92,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -95,6 +102,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: true, include: [], + ...noFill, }, }, { @@ -104,6 +112,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: true, include: [], + ...noFill, }, }, { @@ -113,6 +122,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: true, include: [], + ...noFill, }, }, { @@ -122,6 +132,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: true, 
include: [], + ...noFill, }, }, { @@ -131,6 +142,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: false, include: [], + ...noFill, }, }, { @@ -140,6 +152,7 @@ describe('buildVectorMatching test', () => { matchingLabels: [], on: false, include: [], + ...noFill, }, }, { @@ -149,6 +162,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['bar'], on: true, include: [], + ...noFill, }, }, { @@ -158,6 +172,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: true, include: ['bar'], + ...noFill, }, }, { @@ -167,6 +182,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: false, include: ['blub'], + ...noFill, }, }, { @@ -176,6 +192,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: false, include: ['bar'], + ...noFill, }, }, { @@ -185,6 +202,7 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: true, include: ['bar', 'foo'], + ...noFill, }, }, { @@ -194,6 +212,57 @@ describe('buildVectorMatching test', () => { matchingLabels: ['test', 'blub'], on: false, include: ['bar', 'foo'], + ...noFill, + }, + }, + { + binaryExpr: 'foo + fill(23) bar', + expectedVectorMatching: { + card: VectorMatchCardinality.CardOneToOne, + matchingLabels: [], + on: false, + include: [], + fill: { lhs: 23, rhs: 23 }, + }, + }, + { + binaryExpr: 'foo + fill_left(23) bar', + expectedVectorMatching: { + card: VectorMatchCardinality.CardOneToOne, + matchingLabels: [], + on: false, + include: [], + fill: { lhs: 23, rhs: null }, + }, + }, + { + binaryExpr: 'foo + fill_right(23) bar', + expectedVectorMatching: { + card: VectorMatchCardinality.CardOneToOne, + matchingLabels: [], + on: false, + include: [], + fill: { lhs: null, rhs: 23 }, + }, + }, + { + binaryExpr: 'foo + fill_left(23) fill_right(42) bar', + expectedVectorMatching: { + card: VectorMatchCardinality.CardOneToOne, + matchingLabels: [], + on: false, + 
include: [], + fill: { lhs: 23, rhs: 42 }, + }, + }, + { + binaryExpr: 'foo + fill_right(23) fill_left(42) bar', + expectedVectorMatching: { + card: VectorMatchCardinality.CardOneToOne, + matchingLabels: [], + on: false, + include: [], + fill: { lhs: 42, rhs: 23 }, }, }, ]; @@ -203,7 +272,7 @@ describe('buildVectorMatching test', () => { const node = syntaxTree(state).topNode.getChild(BinaryExpr); expect(node).toBeTruthy(); if (node) { - expect(value.expectedVectorMatching).toEqual(buildVectorMatching(state, node)); + expect(buildVectorMatching(state, node)).toEqual(value.expectedVectorMatching); } }); }); diff --git a/web/ui/module/codemirror-promql/src/parser/vector.ts b/web/ui/module/codemirror-promql/src/parser/vector.ts index c47ca1fb76..9fc31bf5c6 100644 --- a/web/ui/module/codemirror-promql/src/parser/vector.ts +++ b/web/ui/module/codemirror-promql/src/parser/vector.ts @@ -24,6 +24,11 @@ import { On, Or, Unless, + NumberDurationLiteral, + FillModifier, + FillClause, + FillLeftClause, + FillRightClause, } from '@prometheus-io/lezer-promql'; import { VectorMatchCardinality, VectorMatching } from '../types'; import { containsAtLeastOneChild } from './path-finder'; @@ -37,6 +42,10 @@ export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode): matchingLabels: [], on: false, include: [], + fill: { + lhs: null, + rhs: null, + }, }; const modifierClause = binaryNode.getChild(MatchingModifierClause); if (modifierClause) { @@ -60,6 +69,32 @@ export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode): } } + const fillModifier = binaryNode.getChild(FillModifier); + if (fillModifier) { + const fill = fillModifier.getChild(FillClause); + const fillLeft = fillModifier.getChild(FillLeftClause); + const fillRight = fillModifier.getChild(FillRightClause); + + const getFillValue = (node: SyntaxNode) => { + const valueNode = node.getChild(NumberDurationLiteral); + return valueNode ? 
parseFloat(state.sliceDoc(valueNode.from, valueNode.to)) : null; + }; + + if (fill) { + const value = getFillValue(fill); + result.fill.lhs = value; + result.fill.rhs = value; + } + + if (fillLeft) { + result.fill.lhs = getFillValue(fillLeft); + } + + if (fillRight) { + result.fill.rhs = getFillValue(fillRight); + } + } + const isSetOperator = containsAtLeastOneChild(binaryNode, And, Or, Unless); if (isSetOperator && result.card === VectorMatchCardinality.CardOneToOne) { result.card = VectorMatchCardinality.CardManyToMany; diff --git a/web/ui/module/codemirror-promql/src/types/vector.ts b/web/ui/module/codemirror-promql/src/types/vector.ts index 4e7a4f4c45..709b0b76d6 100644 --- a/web/ui/module/codemirror-promql/src/types/vector.ts +++ b/web/ui/module/codemirror-promql/src/types/vector.ts @@ -18,6 +18,11 @@ export enum VectorMatchCardinality { CardManyToMany = 'many-to-many', } +export interface FillValues { + lhs: number | null; + rhs: number | null; +} + export interface VectorMatching { // The cardinality of the two Vectors. card: VectorMatchCardinality; @@ -30,4 +35,6 @@ export interface VectorMatching { // Include contains additional labels that should be included in // the result from the side with the lower cardinality. include: string[]; + // Fill contains optional fill values for missing elements. + fill: FillValues; } diff --git a/web/ui/module/lezer-promql/src/promql.grammar b/web/ui/module/lezer-promql/src/promql.grammar index 5fe8d4d025..9308ad01be 100644 --- a/web/ui/module/lezer-promql/src/promql.grammar +++ b/web/ui/module/lezer-promql/src/promql.grammar @@ -101,11 +101,30 @@ MatchingModifierClause { ((GroupLeft | GroupRight) (!group GroupingLabels)?)? 
} +FillClause { + Fill "(" NumberDurationLiteral ")" +} + +FillLeftClause { + FillLeft "(" NumberDurationLiteral ")" +} + +FillRightClause { + FillRight "(" NumberDurationLiteral ")" +} + +FillModifier { + (FillClause | FillLeftClause | FillRightClause) | + (FillLeftClause FillRightClause) | + (FillRightClause FillLeftClause) +} + BoolModifier { Bool } binModifiers { BoolModifier? MatchingModifierClause? + FillModifier? } GroupingLabels { @@ -366,7 +385,10 @@ NumberDurationLiteralInDurationContext { Start, End, Smoothed, - Anchored + Anchored, + Fill, + FillLeft, + FillRight } @external propSource promQLHighLight from "./highlight" diff --git a/web/ui/module/lezer-promql/src/tokens.js b/web/ui/module/lezer-promql/src/tokens.js index 523c306ae9..6fd681f1f8 100644 --- a/web/ui/module/lezer-promql/src/tokens.js +++ b/web/ui/module/lezer-promql/src/tokens.js @@ -12,82 +12,88 @@ // limitations under the License. import { - And, - Avg, - Atan2, - Bool, - Bottomk, - By, - Count, - CountValues, - End, - Group, - GroupLeft, - GroupRight, - Ignoring, - inf, - Max, - Min, - nan, - Offset, - On, - Or, - Quantile, - LimitK, - LimitRatio, - Start, - Stddev, - Stdvar, - Sum, - Topk, - Unless, - Without, - Smoothed, - Anchored, -} from './parser.terms.js'; + And, + Avg, + Atan2, + Bool, + Bottomk, + By, + Count, + CountValues, + End, + Group, + GroupLeft, + GroupRight, + Ignoring, + inf, + Max, + Min, + nan, + Offset, + On, + Or, + Quantile, + LimitK, + LimitRatio, + Start, + Stddev, + Stdvar, + Sum, + Topk, + Unless, + Without, + Smoothed, + Anchored, + Fill, + FillLeft, + FillRight, +} from "./parser.terms.js"; const keywordTokens = { - inf: inf, - nan: nan, - bool: Bool, - ignoring: Ignoring, - on: On, - group_left: GroupLeft, - group_right: GroupRight, - offset: Offset, + inf: inf, + nan: nan, + bool: Bool, + ignoring: Ignoring, + on: On, + group_left: GroupLeft, + group_right: GroupRight, + offset: Offset, }; export const specializeIdentifier = (value, stack) => { - return 
keywordTokens[value.toLowerCase()] || -1; + return keywordTokens[value.toLowerCase()] || -1; }; const contextualKeywordTokens = { - avg: Avg, - atan2: Atan2, - bottomk: Bottomk, - count: Count, - count_values: CountValues, - group: Group, - max: Max, - min: Min, - quantile: Quantile, - limitk: LimitK, - limit_ratio: LimitRatio, - stddev: Stddev, - stdvar: Stdvar, - sum: Sum, - topk: Topk, - by: By, - without: Without, - and: And, - or: Or, - unless: Unless, - start: Start, - end: End, - smoothed: Smoothed, - anchored: Anchored, + avg: Avg, + atan2: Atan2, + bottomk: Bottomk, + count: Count, + count_values: CountValues, + group: Group, + max: Max, + min: Min, + quantile: Quantile, + limitk: LimitK, + limit_ratio: LimitRatio, + stddev: Stddev, + stdvar: Stdvar, + sum: Sum, + topk: Topk, + by: By, + without: Without, + and: And, + or: Or, + unless: Unless, + start: Start, + end: End, + smoothed: Smoothed, + anchored: Anchored, + fill: Fill, + fill_left: FillLeft, + fill_right: FillRight, }; export const extendIdentifier = (value, stack) => { - return contextualKeywordTokens[value.toLowerCase()] || -1; + return contextualKeywordTokens[value.toLowerCase()] || -1; }; From 57dd1f18b4686647af93f83beead2d5fa4f2345e Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Wed, 10 Dec 2025 19:25:08 +0100 Subject: [PATCH 028/165] Add fill modifier PromQL tests Signed-off-by: Julius Volz --- promql/promqltest/testdata/fill-modifier.test | 343 ++++++++++++++++++ 1 file changed, 343 insertions(+) create mode 100644 promql/promqltest/testdata/fill-modifier.test diff --git a/promql/promqltest/testdata/fill-modifier.test b/promql/promqltest/testdata/fill-modifier.test new file mode 100644 index 0000000000..08c4396242 --- /dev/null +++ b/promql/promqltest/testdata/fill-modifier.test @@ -0,0 +1,343 @@ +# ==================== fill / fill_left / fill_right modifier tests ==================== + +# Test data for fill modifier tests: vectors with partial overlap. 
+load 5m + left_vector{label="a"} 10 + left_vector{label="b"} 20 + left_vector{label="c"} 30 + right_vector{label="a"} 100 + right_vector{label="b"} 200 + right_vector{label="d"} 400 + +# ---------- Arithmetic operators with fill modifiers ---------- + +# fill(0): Fill both sides with 0 for addition. +eval instant at 0m left_vector + fill(0) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} 30 + {label="d"} 400 + +# fill_left(0): Only fill left side with 0. +eval instant at 0m left_vector + fill_left(0) right_vector + {label="a"} 110 + {label="b"} 220 + {label="d"} 400 + +# fill_right(0): Only fill right side with 0. +eval instant at 0m left_vector + fill_right(0) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} 30 + +# fill_left and fill_right with different values. +eval instant at 0m left_vector + fill_left(5) fill_right(7) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} 37 + {label="d"} 405 + +# fill with NaN. +eval instant at 0m left_vector + fill(NaN) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} NaN + {label="d"} NaN + +# fill with Inf. +eval instant at 0m left_vector + fill(Inf) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} +Inf + {label="d"} +Inf + +# fill with -Inf. +eval instant at 0m left_vector + fill(-Inf) right_vector + {label="a"} 110 + {label="b"} 220 + {label="c"} -Inf + {label="d"} -Inf + +# ---------- Comparison operators with fill modifiers ---------- + +# fill with equality comparison. +eval instant at 0m left_vector == fill(30) right_vector + left_vector{label="c"} 30 + +# fill with inequality comparison. +eval instant at 0m left_vector != fill(30) right_vector + left_vector{label="a"} 10 + left_vector{label="b"} 20 + {label="d"} 30 + +# fill with greater than. +eval instant at 0m left_vector > fill(25) right_vector + left_vector{label="c"} 30 + +# ---------- Comparison operators with bool modifier and fill ---------- + +# fill with equality comparison and bool. 
+eval instant at 0m left_vector == bool fill(30) right_vector + {label="a"} 0 + {label="b"} 0 + {label="c"} 1 + {label="d"} 0 + +# fill with inequality comparison and bool. +eval instant at 0m left_vector != bool fill(30) right_vector + {label="a"} 1 + {label="b"} 1 + {label="c"} 0 + {label="d"} 1 + +# fill with greater than and bool. +eval instant at 0m left_vector > bool fill(25) right_vector + {label="a"} 0 + {label="b"} 0 + {label="c"} 1 + {label="d"} 0 + +# ---------- fill with on() and ignoring() modifiers ---------- + +clear + +load 5m + left_vector{job="foo", instance="a"} 10 + left_vector{job="foo", instance="b"} 20 + left_vector{job="bar", instance="a"} 30 + right_vector{job="foo", instance="a"} 100 + right_vector{job="foo", instance="c"} 300 + +# fill with on(). +eval instant at 0m left_vector + on(job, instance) fill(0) right_vector + {job="foo", instance="a"} 110 + {job="foo", instance="b"} 20 + {job="bar", instance="a"} 30 + {job="foo", instance="c"} 300 + +# fill_right with on(). +eval instant at 0m left_vector + on(job, instance) fill_right(0) right_vector + {job="foo", instance="a"} 110 + {job="foo", instance="b"} 20 + {job="bar", instance="a"} 30 + +# fill_left with on(). +eval instant at 0m left_vector + on(job, instance) fill_left(0) right_vector + {job="foo", instance="a"} 110 + {job="foo", instance="c"} 300 + +# fill with ignoring() - requires group_left since ignoring(job) creates many-to-one matching +# when two left_vector series have same instance but different jobs. 
+eval instant at 0m left_vector + ignoring(job) group_left fill(0) right_vector + {instance="a", job="foo"} 110 + {instance="a", job="bar"} 130 + {instance="b", job="foo"} 20 + {instance="c"} 300 + +# ---------- fill with group_left / group_right (many-to-one / one-to-many) ---------- + +clear + +load 5m + requests{method="GET", status="200"} 100 + requests{method="POST", status="200"} 200 + requests{method="GET", status="500"} 10 + requests{method="POST", status="500"} 20 + limits{status="200"} 1000 + limits{status="404"} 500 + limits{status="500"} 50 + +# group_left with fill_right: fill missing "one" side series. +eval instant at 0m requests / on(status) group_left fill_right(1) limits + {method="GET", status="200"} 0.1 + {method="POST", status="200"} 0.2 + {method="GET", status="500"} 0.2 + {method="POST", status="500"} 0.4 + +# group_left with fill_left: fill missing "many" side series. +# For status="404", there's no matching requests, so a single series with the match group's labels is filled +eval instant at 0m requests + on(status) group_left fill_left(0) limits + {method="GET", status="200"} 1100 + {method="POST", status="200"} 1200 + {method="GET", status="500"} 60 + {method="POST", status="500"} 70 + {status="404"} 500 + +# group_left with fill on both sides. +eval instant at 0m requests + on(status) group_left fill(0) limits + {method="GET", status="200"} 1100 + {method="POST", status="200"} 1200 + {method="GET", status="500"} 60 + {method="POST", status="500"} 70 + {status="404"} 500 + +# group_right with fill_left: fill missing "one" side series. +clear + +load 5m + cpu_info{instance="a", cpu="0"} 1 + cpu_info{instance="a", cpu="1"} 1 + cpu_info{instance="b", cpu="0"} 1 + node_meta{instance="a"} 100 + node_meta{instance="c"} 300 + +# fill_left fills the "one" side (node_meta) when missing for a "many" side series. 
+eval instant at 0m node_meta * on(instance) group_right fill_left(1) cpu_info + {instance="a", cpu="0"} 100 + {instance="a", cpu="1"} 100 + {instance="c"} 300 + +# group_right with fill_right: fill missing "many" side series. +eval instant at 0m node_meta * on(instance) group_right fill_right(0) cpu_info + {instance="a", cpu="0"} 100 + {instance="a", cpu="1"} 100 + {instance="b", cpu="0"} 0 + +# group_right with fill on both sides. +eval instant at 0m node_meta * on(instance) group_right fill(1) cpu_info + {instance="a", cpu="0"} 100 + {instance="a", cpu="1"} 100 + {instance="b", cpu="0"} 1 + {instance="c"} 300 + +# ---------- fill with group_left/group_right and extra labels ---------- + +clear + +load 5m + requests{method="GET", status="200"} 100 + requests{method="POST", status="200"} 200 + limits{status="200", owner="team-a"} 1000 + limits{status="500", owner="team-b"} 50 + +# group_left with extra label and fill_right. +# Note: when filling the "one" side, the joined label cannot be filled. +eval instant at 0m requests + on(status) group_left(owner) fill_right(0) limits + {method="GET", status="200", owner="team-a"} 1100 + {method="POST", status="200", owner="team-a"} 1200 + +# ---------- Edge cases ---------- + +clear + +load 5m + only_left{label="a"} 10 + only_left{label="b"} 20 + only_right{label="c"} 30 + only_right{label="d"} 40 + +# No overlap at all - fill creates all results. +eval instant at 0m only_left + fill(0) only_right + {label="a"} 10 + {label="b"} 20 + {label="c"} 30 + {label="d"} 40 + +# No overlap - fill_left only creates right side results. +eval instant at 0m only_left + fill_left(0) only_right + {label="c"} 30 + {label="d"} 40 + +# No overlap - fill_right only creates left side results. +eval instant at 0m only_left + fill_right(0) only_right + {label="a"} 10 + {label="b"} 20 + +# Complete overlap - fill has no effect. 
+clear + +load 5m + complete_left{label="a"} 10 + complete_left{label="b"} 20 + complete_right{label="a"} 100 + complete_right{label="b"} 200 + +eval instant at 0m complete_left + fill(99) complete_right + {label="a"} 110 + {label="b"} 220 + +# ---------- fill with range queries ---------- + +clear + +load 5m + range_left{label="a"} 1 2 3 4 5 + range_left{label="b"} 10 20 30 40 50 + range_right{label="a"} 100 200 300 400 500 + range_right{label="c"} 1000 2000 3000 4000 5000 + +eval range from 0 to 20m step 5m range_left + fill(0) range_right + {label="a"} 101 202 303 404 505 + {label="b"} 10 20 30 40 50 + {label="c"} 1000 2000 3000 4000 5000 + +eval range from 0 to 20m step 5m range_left + fill_right(0) range_right + {label="a"} 101 202 303 404 505 + {label="b"} 10 20 30 40 50 + +eval range from 0 to 20m step 5m range_left + fill_left(0) range_right + {label="a"} 101 202 303 404 505 + {label="c"} 1000 2000 3000 4000 5000 + +# Range queries with intermittently present series. +clear + +load 5m + intermittent_left{label="a"} 1 _ 3 _ 5 + intermittent_left{label="b"} _ 20 _ 40 _ + intermittent_right{label="a"} _ 200 _ 400 _ + intermittent_right{label="b"} 100 _ 300 _ 500 + intermittent_right{label="c"} 1000 _ _ 4000 5000 + +# When both sides have the same label but are present at different times, +# fill creates results at all timestamps where at least one side is present. +eval range from 0 to 20m step 5m intermittent_left + fill(0) intermittent_right + {label="a"} 1 200 3 400 5 + {label="b"} 100 20 300 40 500 + {label="c"} 1000 _ _ 4000 5000 + +# fill_right only fills the right side when it's missing. +# Output only exists when left side is present (right side filled with 0 if missing). +eval range from 0 to 20m step 5m intermittent_left + fill_right(0) intermittent_right + {label="a"} 1 _ 3 _ 5 + {label="b"} _ 20 _ 40 _ + +# fill_left only fills the left side when it's missing. +# Output only exists when right side is present (left side filled with 0 if missing). 
+eval range from 0 to 20m step 5m intermittent_left + fill_left(0) intermittent_right + {label="a"} _ 200 _ 400 _ + {label="b"} 100 _ 300 _ 500 + {label="c"} 1000 _ _ 4000 5000 + +# ---------- fill with vectors where one side is empty ---------- + +clear + +load 5m + non_empty{label="a"} 10 + non_empty{label="b"} 20 + +# Empty right side - fill_right has no effect (nothing to add). +eval instant at 0m non_empty + fill_right(0) nonexistent + {label="a"} 10 + {label="b"} 20 + +# Empty right side - fill_left creates nothing (no right side labels to use). +eval instant at 0m non_empty + fill_left(0) nonexistent + +# Empty left side - fill_left has no effect. +eval instant at 0m nonexistent + fill_left(0) non_empty + {label="a"} 10 + {label="b"} 20 + +# Empty left side - fill_right creates nothing. +eval instant at 0m nonexistent + fill_right(0) non_empty + +# fill both sides with one side empty. +eval instant at 0m non_empty + fill(0) nonexistent + {label="a"} 10 + {label="b"} 20 + +eval instant at 0m nonexistent + fill(0) non_empty + {label="a"} 10 + {label="b"} 20 From ce26370eeb2e19bdfac68c40a1f21913a046fddd Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Wed, 10 Dec 2025 20:07:43 +0100 Subject: [PATCH 029/165] Add PromLens binop matching explain view tests Signed-off-by: Julius Volz --- web/ui/mantine-ui/src/promql/binOp.test.ts | 431 +++++++++++++++++++++ web/ui/mantine-ui/src/promql/binOp.ts | 4 +- 2 files changed, 433 insertions(+), 2 deletions(-) diff --git a/web/ui/mantine-ui/src/promql/binOp.test.ts b/web/ui/mantine-ui/src/promql/binOp.test.ts index 9c5d59a94c..76dd24fa79 100644 --- a/web/ui/mantine-ui/src/promql/binOp.test.ts +++ b/web/ui/mantine-ui/src/promql/binOp.test.ts @@ -2163,6 +2163,437 @@ const testCases: TestCase[] = [ numGroups: 2, }, }, + { + // metric_a - fill(0) metric_b + desc: "subtraction with fill(0) but no missing series", + op: binaryOperatorType.sub, + matching: { + card: vectorMatchCardinality.oneToOne, + on: false, + include: [], + 
labels: [], + fillValues: { lhs: 0, rhs: 0 }, + }, + lhs: testMetricA, + rhs: testMetricB, + result: { + groups: { + [fnv1a(["a", "x", "same"])]: { + groupLabels: { label1: "a", label2: "x", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "a", + label2: "x", + same: "same", + }, + value: [0, "1"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "a", + label2: "x", + same: "same", + }, + value: [0, "10"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "a", label2: "x", same: "same" }, + value: [0, "-9"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["a", "y", "same"])]: { + groupLabels: { label1: "a", label2: "y", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "a", + label2: "y", + same: "same", + }, + value: [0, "2"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "a", + label2: "y", + same: "same", + }, + value: [0, "20"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "a", label2: "y", same: "same" }, + value: [0, "-18"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["b", "x", "same"])]: { + groupLabels: { label1: "b", label2: "x", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "b", + label2: "x", + same: "same", + }, + value: [0, "3"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "b", + label2: "x", + same: "same", + }, + value: [0, "30"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "b", label2: "x", same: "same" }, + value: [0, "-27"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["b", "y", "same"])]: { + groupLabels: { label1: "b", label2: "y", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "b", + label2: "y", + same: "same", + }, + value: [0, "4"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: 
"metric_b", + label1: "b", + label2: "y", + same: "same", + }, + value: [0, "40"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "b", label2: "y", same: "same" }, + value: [0, "-36"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + }, + numGroups: 4, + }, + }, + { + // metric_a[0..2] - fill_left(23) fill_right(42) metric_b[1...3] + desc: "subtraction with different fill values and missing series on each side", + op: binaryOperatorType.sub, + matching: { + card: vectorMatchCardinality.oneToOne, + on: false, + include: [], + labels: [], + fillValues: { lhs: 23, rhs: 42 }, + }, + lhs: testMetricA.slice(0, 3), + rhs: testMetricB.slice(1, 4), + result: { + groups: { + [fnv1a(["a", "x", "same"])]: { + groupLabels: { label1: "a", label2: "x", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "a", + label2: "x", + same: "same", + }, + value: [0, "1"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + label1: "a", + label2: "x", + same: "same", + }, + value: [0, "42"], + filled: true, + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "a", label2: "x", same: "same" }, + value: [0, "-41"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["a", "y", "same"])]: { + groupLabels: { label1: "a", label2: "y", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "a", + label2: "y", + same: "same", + }, + value: [0, "2"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "a", + label2: "y", + same: "same", + }, + value: [0, "20"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "a", label2: "y", same: "same" }, + value: [0, "-18"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["b", "x", "same"])]: { + groupLabels: { label1: "b", label2: "x", same: "same" }, + lhs: [ + { + metric: { + __name__: "metric_a", + label1: "b", + label2: "x", + same: "same", + }, + value: [0, "3"], + }, + ], + 
lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "b", + label2: "x", + same: "same", + }, + value: [0, "30"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "b", label2: "x", same: "same" }, + value: [0, "-27"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + [fnv1a(["b", "y", "same"])]: { + groupLabels: { label1: "b", label2: "y", same: "same" }, + lhs: [ + { + metric: { + label1: "b", + label2: "y", + same: "same", + }, + filled: true, + value: [0, "23"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { + __name__: "metric_b", + label1: "b", + label2: "y", + same: "same", + }, + value: [0, "40"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "b", label2: "y", same: "same" }, + value: [0, "-17"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + }, + numGroups: 4, + }, + }, + { + // metric_b[0...1] - on(label1) group_left fill(0) metric_c + desc: "many-to-one matching with matching labels specified, group_left, and fill specified", + op: binaryOperatorType.sub, + matching: { + card: vectorMatchCardinality.manyToOne, + on: true, + include: [], + labels: ["label1"], + fillValues: { lhs: 0, rhs: 0 }, + }, + lhs: testMetricB.slice(0, 2), + rhs: testMetricC, + result: { + groups: { + [fnv1a(["a"])]: { + groupLabels: { label1: "a" }, + lhs: [ + { + metric: { + __name__: "metric_b", + label1: "a", + label2: "x", + same: "same", + }, + value: [0, "10"], + }, + { + metric: { + __name__: "metric_b", + label1: "a", + label2: "y", + same: "same", + }, + value: [0, "20"], + }, + ], + lhsCount: 2, + rhs: [ + { + metric: { __name__: "metric_c", label1: "a" }, + value: [0, "100"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "a", label2: "x", same: "same" }, + value: [0, "-90"], + }, + manySideIdx: 0, + }, + { + sample: { + metric: { label1: "a", label2: "y", same: "same" }, + value: [0, "-80"], + }, + manySideIdx: 1, + }, + ], + error: null, + }, + [fnv1a(["b"])]: 
{ + groupLabels: { label1: "b" }, + lhs: [ + { + metric: { + label1: "b", + }, + filled: true, + value: [0, "0"], + }, + ], + lhsCount: 1, + rhs: [ + { + metric: { __name__: "metric_c", label1: "b" }, + value: [0, "200"], + }, + ], + rhsCount: 1, + result: [ + { + sample: { + metric: { label1: "b" }, + value: [0, "-200"], + }, + manySideIdx: 0, + }, + ], + error: null, + }, + }, + numGroups: 2, + }, + }, { // metric_a and metric b desc: "and operator with no matching labels and matching groups", diff --git a/web/ui/mantine-ui/src/promql/binOp.ts b/web/ui/mantine-ui/src/promql/binOp.ts index f583bf81bb..9ebee90f64 100644 --- a/web/ui/mantine-ui/src/promql/binOp.ts +++ b/web/ui/mantine-ui/src/promql/binOp.ts @@ -347,7 +347,7 @@ export const computeVectorVectorBinOp = ( Object.values(groups).forEach((mg) => { if (mg.lhs.length === 0 && matching.fillValues.lhs !== null) { mg.lhs.push({ - metric: {}, + metric: mg.groupLabels, value: [0, formatPrometheusFloat(matching.fillValues.lhs as number)], filled: true, }); @@ -355,7 +355,7 @@ export const computeVectorVectorBinOp = ( } if (mg.rhs.length === 0 && matching.fillValues.rhs !== null) { mg.rhs.push({ - metric: {}, + metric: mg.groupLabels, value: [0, formatPrometheusFloat(matching.fillValues.rhs as number)], filled: true, }); From 4c9795221073defa19d662c366c0ff1f0fca0e97 Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Thu, 11 Dec 2025 12:29:48 +0100 Subject: [PATCH 030/165] Document new fill binop modifiers Signed-off-by: Julius Volz --- docs/querying/operators.md | 117 ++++++++++++++++++++++++++----------- 1 file changed, 82 insertions(+), 35 deletions(-) diff --git a/docs/querying/operators.md b/docs/querying/operators.md index b320d8e86e..c5b01aff71 100644 --- a/docs/querying/operators.md +++ b/docs/querying/operators.md @@ -47,9 +47,9 @@ special values like `NaN`, `+Inf`, and `-Inf`. scalar that is the result of the operator applied to both scalar operands. 
**Between an instant vector and a scalar**, the operator is applied to the -value of every data sample in the vector. +value of every data sample in the vector. -If the data sample is a float, the operation is performed between that float and the scalar. +If the data sample is a float, the operation is performed between that float and the scalar. For example, if an instant vector of float samples is multiplied by 2, the result is another vector of float samples in which every sample value of the original vector is multiplied by 2. @@ -81,8 +81,9 @@ following: **Between two instant vectors**, a binary arithmetic operator is applied to each entry in the LHS vector and its [matching element](#vector-matching) in the RHS vector. The result is propagated into the result vector with the -grouping labels becoming the output label set. Entries for which no matching -entry in the right-hand vector can be found are not part of the result. +grouping labels becoming the output label set. By default, series for which +no matching entry in the opposite vector can be found are not part of the +result. This behavior can be adjusted using [fill modifiers](#filling-in-missing-matches). If two float samples are matched, the arithmetic operator is applied to the two input values. @@ -97,7 +98,7 @@ If two histogram samples are matched, only `+` and `-` are valid operations, each adding or subtracting all matching bucket populations and the count and the sum of observations. All other operations result in the removal of the corresponding element from the output vector, flagged by an info-level -annotation. The `+` and -` operations should generally only be applied to gauge +annotation. The `+` and `-` operations should generally only be applied to gauge histograms, but PromQL allows them for counter histograms, too, to cover specific use cases, for which special attention is required to avoid problems with unaligned counter resets. 
(Certain incompatibilities of counter resets can @@ -106,7 +107,7 @@ two counter histograms results in a counter histogram. All other combination of operands and all subtractions result in a gauge histogram. **In any arithmetic binary operation involving vectors**, the metric name is -dropped. This occurs even if `__name__` is explicitly mentioned in `on` +dropped. This occurs even if `__name__` is explicitly mentioned in `on` (see https://github.com/prometheus/prometheus/issues/16631 for further discussion). **For any arithmetic binary operation that may result in a negative @@ -156,9 +157,9 @@ info-level annotation. applied to matching entries. Vector elements for which the expression is not true or which do not find a match on the other side of the expression get dropped from the result, while the others are propagated into a result vector -with the grouping labels becoming the output label set. +with the grouping labels becoming the output label set. -Matches between two float samples work as usual. +Matches between two float samples work as usual. Matches between a float sample and a histogram sample are invalid, and the corresponding element is removed from the result vector, flagged by an info-level @@ -171,8 +172,8 @@ comparison binary operations are again invalid. modifier changes the behavior in the following ways: * Vector elements which find a match on the other side of the expression but for - which the expression is false instead have the value `0` and vector elements - that do find a match and for which the expression is true have the value `1`. + which the expression is false instead have the value `0`, and vector elements + that do find a match and for which the expression is true have the value `1`. (Note that elements with no match or invalid operations involving histogram samples still return no result rather than the value `0`.) * The metric name is dropped. @@ -216,11 +217,10 @@ matching behavior: One-to-one and many-to-one/one-to-many. 
### Vector matching keywords -These vector matching keywords allow for matching between series with different label sets -providing: +These vector matching keywords allow for matching between series with different label sets: -* `on` -* `ignoring` +* `on(