// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tsdb

import (
	"fmt"
	"math"
	"math/rand"
	"sort"
	"testing"

	"github.com/pkg/errors"
	"github.com/prometheus/tsdb/chunkenc"
	"github.com/prometheus/tsdb/chunks"
	"github.com/prometheus/tsdb/index"
	"github.com/prometheus/tsdb/labels"
	"github.com/prometheus/tsdb/testutil"
)

type mockSeriesSet struct {
	next   func() bool
	series func() Series
	err    func() error
}

func (m *mockSeriesSet) Next() bool { return m.next() }
func (m *mockSeriesSet) At() Series { return m.series() }
func (m *mockSeriesSet) Err() error { return m.err() }

func newMockSeriesSet(list []Series) *mockSeriesSet {
	i := -1
	return &mockSeriesSet{
		next: func() bool {
			i++
			return i < len(list)
		},
		series: func() Series {
			return list[i]
		},
		err: func() error { return nil },
	}
}

type mockSeriesIterator struct {
	seek func(int64) bool
	at   func() (int64, float64)
	next func() bool
	err  func() error
}

func (m *mockSeriesIterator) Seek(t int64) bool    { return m.seek(t) }
func (m *mockSeriesIterator) At() (int64, float64) { return m.at() }
func (m *mockSeriesIterator) Next() bool           { return m.next() }
func (m *mockSeriesIterator) Err() error           { return m.err() }

type mockSeries struct {
	labels   func() labels.Labels
	iterator func() SeriesIterator
}

func newSeries(l map[string]string, s []sample) Series {
	return &mockSeries{
		labels:   func() labels.Labels { return labels.FromMap(l) },
		iterator: func() SeriesIterator { return newListSeriesIterator(s) },
	}
}

func (m *mockSeries) Labels() labels.Labels    { return m.labels() }
func (m *mockSeries) Iterator() SeriesIterator { return m.iterator() }

type listSeriesIterator struct {
	list []sample
	idx  int
}

func newListSeriesIterator(list []sample) *listSeriesIterator {
	return &listSeriesIterator{list: list, idx: -1}
}

func (it *listSeriesIterator) At() (int64, float64) {
	s := it.list[it.idx]
	return s.t, s.v
}

func (it *listSeriesIterator) Next() bool {
	it.idx++
	return it.idx < len(it.list)
}

func (it *listSeriesIterator) Seek(t int64) bool {
	if it.idx == -1 {
		it.idx = 0
	}
	// Do binary search between current position and end.
	it.idx = sort.Search(len(it.list)-it.idx, func(i int) bool {
		s := it.list[i+it.idx]
		return s.t >= t
	})

	return it.idx < len(it.list)
}

func (it *listSeriesIterator) Err() error {
	return nil
}

func TestMergedSeriesSet(t *testing.T) {
	cases := []struct {
		// The input sets in order (samples in series in b are strictly
		// after those in a).
		a, b SeriesSet
		// The composition of a and b in the merged series set must yield
		// results equivalent to the result series set.
		exp SeriesSet
	}{
		{
			a: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
			b: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
			exp: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 1, v: 1},
					{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
		},
		{
			a: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 1, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []sample{
					{t: 1, v: 2},
				}),
			}),
			b: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []sample{
					{t: 2, v: 2},
				}),
			}),
			exp: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 1, v: 1},
					{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []sample{
					{t: 1, v: 2},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []sample{
					{t: 2, v: 2},
				}),
			}),
		},
	}

Outer:
	for _, c := range cases {
		res := newMergedSeriesSet(c.a, c.b)

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}
}

func expandSeriesIterator(it SeriesIterator) (r []sample, err error) {
	for it.Next() {
		t, v := it.At()
		r = append(r, sample{t: t, v: v})
	}

	return r, it.Err()
}

type seriesSamples struct {
	lset   map[string]string
	chunks [][]sample
}

// Index: labels -> postings -> chunkMetas -> chunkRef
// ChunkReader: ref -> vals
func createIdxChkReaders(tc []seriesSamples) (IndexReader, ChunkReader) {
	sort.Slice(tc, func(i, j int) bool {
		return labels.Compare(labels.FromMap(tc[i].lset), labels.FromMap(tc[j].lset)) < 0
	})

	postings := index.NewMemPostings()
	chkReader := mockChunkReader(make(map[uint64]chunkenc.Chunk))
	lblIdx := make(map[string]stringset)
	mi := newMockIndex()

	for i, s := range tc {
		i = i + 1 // 0 is not a valid posting.
		metas := make([]chunks.Meta, 0, len(s.chunks))
		for _, chk := range s.chunks {
			// Collisions can happen, but that is fine for tests.
			ref := rand.Uint64()

			metas = append(metas, chunks.Meta{
				MinTime: chk[0].t,
				MaxTime: chk[len(chk)-1].t,
				Ref:     ref,
			})

			chunk := chunkenc.NewXORChunk()
			app, _ := chunk.Appender()
			for _, smpl := range chk {
				app.Append(smpl.t, smpl.v)
			}
			chkReader[ref] = chunk
		}

		ls := labels.FromMap(s.lset)
		mi.AddSeries(uint64(i), ls, metas...)

		postings.Add(uint64(i), ls)

		for _, l := range ls {
			vs, present := lblIdx[l.Name]
			if !present {
				vs = stringset{}
				lblIdx[l.Name] = vs
			}
			vs.set(l.Value)
		}
	}

	for l, vs := range lblIdx {
		mi.WriteLabelIndex([]string{l}, vs.slice())
	}

	postings.Iter(func(l labels.Label, p index.Postings) error {
		return mi.WritePostings(l.Name, l.Value, p)
	})

	return mi, chkReader
}

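// A minimal usage sketch (assumed example, not part of the original suite; the test name
// is ours): it walks the mapping described in the comment on createIdxChkReaders,
// resolving a label pair to postings, the posting to chunk metas via the IndexReader,
// and a chunk ref to its data via the ChunkReader.
func TestCreateIdxChkReadersSketch(t *testing.T) {
	ir, cr := createIdxChkReaders([]seriesSamples{
		{
			lset:   map[string]string{"a": "a"},
			chunks: [][]sample{{{1, 1}, {2, 2}}},
		},
	})

	// labels -> postings.
	p, err := ir.Postings("a", "a")
	testutil.Ok(t, err)
	testutil.Assert(t, p.Next() == true, "")

	// postings -> chunk metas.
	var (
		lset labels.Labels
		chks []chunks.Meta
	)
	testutil.Ok(t, ir.Series(p.At(), &lset, &chks))
	testutil.Equals(t, labels.FromMap(map[string]string{"a": "a"}), lset)
	testutil.Equals(t, 1, len(chks))

	// chunk ref -> encoded samples.
	chk, err := cr.Chunk(chks[0].Ref)
	testutil.Ok(t, err)
	testutil.Equals(t, 2, chk.NumSamples())
}
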
func TestBlockQuerier(t *testing.T) {
	newSeries := func(l map[string]string, s []sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	type query struct {
		mint, maxt int64
		ms         []labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []seriesSamples

		queries []query
	}{
		data: []seriesSamples{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
			{
				lset: map[string]string{
					"p": "abcd",
					"x": "xyz",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "ab",
					"p": "abce",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"p": "xyz",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{4, 4}, {5, 5}, {6, 6},
					},
				},
			},
		},

		queries: []query{
			{
				mint: 0,
				maxt: 0,
				ms:   []labels.Matcher{},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 0,
				maxt: 0,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 1,
				maxt: 0,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 2,
				maxt: 6,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
					),
				}),
			},
			{
				mint: 2,
				maxt: 6,
				ms:   []labels.Matcher{labels.NewPrefixMatcher("p", "abc")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "ab",
						"p": "abce",
					},
						[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
					),
					newSeries(map[string]string{
						"p": "abcd",
						"x": "xyz",
					},
						[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
					),
				}),
			},
		},
	}

Outer:
	for _, c := range cases.queries {
		ir, cr := createIdxChkReaders(cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: NewMemTombstones(),

			mint: c.mint,
			maxt: c.maxt,
		}

		res, err := querier.Select(c.ms...)
		testutil.Ok(t, err)

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}

	return
}

func TestBlockQuerierDelete(t *testing.T) {
	newSeries := func(l map[string]string, s []sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	type query struct {
		mint, maxt int64
		ms         []labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []seriesSamples

		tombstones TombstoneReader
		queries    []query
	}{
		data: []seriesSamples{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{4, 15}, {5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
		},
		tombstones: &memTombstones{intvlGroups: map[uint64]Intervals{
			1: Intervals{{1, 3}},
			2: Intervals{{1, 3}, {6, 10}},
			3: Intervals{{6, 10}},
		}},
		queries: []query{
			{
				mint: 2,
				maxt: 7,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]sample{{5, 2}, {6, 3}, {7, 4}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}, {5, 3}},
					),
				}),
			},
			{
				mint: 2,
				maxt: 7,
				ms:   []labels.Matcher{labels.NewEqualMatcher("b", "b")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}, {5, 3}},
					),
					newSeries(map[string]string{
						"b": "b",
					},
						[]sample{{2, 2}, {3, 6}, {5, 1}},
					),
				}),
			},
			{
				mint: 1,
				maxt: 4,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}},
					),
				}),
			},
			{
				mint: 1,
				maxt: 3,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
		},
	}

Outer:
	for _, c := range cases.queries {
		ir, cr := createIdxChkReaders(cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: cases.tombstones,

			mint: c.mint,
			maxt: c.maxt,
		}

		res, err := querier.Select(c.ms...)
		testutil.Ok(t, err)

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}

	return
}

func TestBaseChunkSeries(t *testing.T) {
	type refdSeries struct {
		lset   labels.Labels
		chunks []chunks.Meta

		ref uint64
	}

	cases := []struct {
		series []refdSeries
		// Postings should be in the sorted order of the series.
		postings []uint64

		expIdxs []int
	}{
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}}...),
					chunks: []chunks.Meta{
						{Ref: 29}, {Ref: 45}, {Ref: 245}, {Ref: 123}, {Ref: 4232}, {Ref: 5344},
						{Ref: 121},
					},
					ref: 12,
				},
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    1,
				},
				{
					lset: labels.New([]labels.Label{{"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
					},
					ref: 108,
				},
			},
			postings: []uint64{12, 13, 10, 108}, // 13 doesn't exist and should just be skipped over.
			expIdxs:  []int{0, 1, 3},
		},
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    3,
				},
			},
			postings: []uint64{},
			expIdxs:  []int{},
		},
	}

	for _, tc := range cases {
		mi := newMockIndex()
		for _, s := range tc.series {
			mi.AddSeries(s.ref, s.lset, s.chunks...)
		}

		bcs := &baseChunkSeries{
			p:          index.NewListPostings(tc.postings),
			index:      mi,
			tombstones: NewMemTombstones(),
		}

		i := 0
		for bcs.Next() {
			lset, chks, _ := bcs.At()

			idx := tc.expIdxs[i]

			testutil.Equals(t, tc.series[idx].lset, lset)
			testutil.Equals(t, tc.series[idx].chunks, chks)

			i++
		}
		testutil.Equals(t, len(tc.expIdxs), i)
		testutil.Ok(t, bcs.Err())
	}
	return
}

// TODO: Remove after simpleSeries is merged
type itSeries struct {
	si SeriesIterator
}

func (s itSeries) Iterator() SeriesIterator { return s.si }
func (s itSeries) Labels() labels.Labels    { return labels.Labels{} }

func chunkFromSamples(s []sample) chunks.Meta {
	mint, maxt := int64(0), int64(0)

	if len(s) > 0 {
		mint, maxt = s[0].t, s[len(s)-1].t
	}

	c := chunkenc.NewXORChunk()
	ca, _ := c.Appender()

	for _, s := range s {
		ca.Append(s.t, s.v)
	}
	return chunks.Meta{
		MinTime: mint,
		MaxTime: maxt,
		Chunk:   c,
	}
}

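// A minimal sanity sketch (not part of the original suite; the test name is ours):
// the Meta returned by chunkFromSamples should carry the samples' time range and an
// XOR chunk that decodes back to exactly the input samples.
func TestChunkFromSamplesMeta(t *testing.T) {
	in := []sample{{1, 10}, {5, 50}}
	m := chunkFromSamples(in)

	testutil.Equals(t, int64(1), m.MinTime)
	testutil.Equals(t, int64(5), m.MaxTime)

	// Decode the chunk and compare against the input.
	it := m.Chunk.Iterator()
	var got []sample
	for it.Next() {
		ts, v := it.At()
		got = append(got, sample{t: ts, v: v})
	}
	testutil.Ok(t, it.Err())
	testutil.Equals(t, in, got)
}
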
func TestSeriesIterator(t *testing.T) {
	itcases := []struct {
		a, b, c []sample
		exp     []sample

		mint, maxt int64
	}{
		{
			a:   []sample{},
			b:   []sample{},
			c:   []sample{},
			exp: []sample{},

			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			b: []sample{},
			c: []sample{
				{7, 89}, {9, 8},
			},
			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []sample{},
			b: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			c: []sample{
				{7, 89}, {9, 8},
			},
			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: 2,
			maxt: 8,
		},
		{
			a: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			b: []sample{
				{7, 89}, {9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},
			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8}, {10, 22}, {203, 3493},
			},
			mint: 6,
			maxt: 10,
		},
	}

	seekcases := []struct {
		a, b, c []sample

		seek    int64
		success bool
		exp     []sample

		mint, maxt int64
	}{
		{
			a: []sample{},
			b: []sample{},
			c: []sample{},

			seek:    0,
			success: false,
			exp:     nil,
		},
		{
			a: []sample{
				{2, 3},
			},
			b: []sample{},
			c: []sample{
				{7, 89}, {9, 8},
			},

			seek:    10,
			success: false,
			exp:     nil,
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
		},
		{
			a: []sample{},
			b: []sample{
				{1, 2}, {3, 5}, {6, 1},
			},
			c: []sample{
				{7, 89}, {9, 8},
			},

			seek:    2,
			success: true,
			exp: []sample{
				{3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: 5,
			maxt: 8,
		},
		{
			a: []sample{
				{6, 1},
			},
			b: []sample{
				{9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},

			seek:    10,
			success: true,
			exp: []sample{
				{10, 22}, {203, 3493},
			},
			mint: 10,
			maxt: 203,
		},
		{
			a: []sample{
				{6, 1},
			},
			b: []sample{
				{9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},

			seek:    203,
			success: true,
			exp: []sample{
				{203, 3493},
			},
			mint: 7,
			maxt: 203,
		},
	}

	t.Run("Chunk", func(t *testing.T) {
		for _, tc := range itcases {
			chkMetas := []chunks.Meta{
				chunkFromSamples(tc.a),
				chunkFromSamples(tc.b),
				chunkFromSamples(tc.c),
			}
			res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

			smplValid := make([]sample, 0)
			for _, s := range tc.exp {
				if s.t >= tc.mint && s.t <= tc.maxt {
					smplValid = append(smplValid, s)
				}
			}
			exp := newListSeriesIterator(smplValid)

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}

		t.Run("Seek", func(t *testing.T) {
			extra := []struct {
				a, b, c []sample

				seek    int64
				success bool
				exp     []sample

				mint, maxt int64
			}{
				{
					a: []sample{
						{6, 1},
					},
					b: []sample{
						{9, 8},
					},
					c: []sample{
						{10, 22}, {203, 3493},
					},

					seek:    203,
					success: false,
					exp:     nil,
					mint:    2,
					maxt:    202,
				},
				{
					a: []sample{
						{6, 1},
					},
					b: []sample{
						{9, 8},
					},
					c: []sample{
						{10, 22}, {203, 3493},
					},

					seek:    5,
					success: true,
					exp:     []sample{{10, 22}},
					mint:    10,
					maxt:    202,
				},
			}

			seekcases2 := append(seekcases, extra...)
			for _, tc := range seekcases2 {
				chkMetas := []chunks.Meta{
					chunkFromSamples(tc.a),
					chunkFromSamples(tc.b),
					chunkFromSamples(tc.c),
				}
				res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

				smplValid := make([]sample, 0)
				for _, s := range tc.exp {
					if s.t >= tc.mint && s.t <= tc.maxt {
						smplValid = append(smplValid, s)
					}
				}
				exp := newListSeriesIterator(smplValid)

				testutil.Equals(t, tc.success, res.Seek(tc.seek))

				if tc.success {
					// Init the list and then proceed to check.
					remaining := exp.Next()
					testutil.Assert(t, remaining == true, "")

					for remaining {
						sExp, eExp := exp.At()
						sRes, eRes := res.At()
						testutil.Equals(t, eExp, eRes)
						testutil.Equals(t, sExp, sRes)

						remaining = exp.Next()
						testutil.Equals(t, remaining, res.Next())
					}
				}
			}
		})
	})

	t.Run("Chain", func(t *testing.T) {
		for _, tc := range itcases {
			a, b, c := itSeries{newListSeriesIterator(tc.a)},
				itSeries{newListSeriesIterator(tc.b)},
				itSeries{newListSeriesIterator(tc.c)}

			res := newChainedSeriesIterator(a, b, c)
			exp := newListSeriesIterator(tc.exp)

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}

		t.Run("Seek", func(t *testing.T) {
			for _, tc := range seekcases {
				a, b, c := itSeries{newListSeriesIterator(tc.a)},
					itSeries{newListSeriesIterator(tc.b)},
					itSeries{newListSeriesIterator(tc.c)}

				res := newChainedSeriesIterator(a, b, c)
				exp := newListSeriesIterator(tc.exp)

				testutil.Equals(t, tc.success, res.Seek(tc.seek))

				if tc.success {
					// Init the list and then proceed to check.
					remaining := exp.Next()
					testutil.Assert(t, remaining == true, "")

					for remaining {
						sExp, eExp := exp.At()
						sRes, eRes := res.At()
						testutil.Equals(t, eExp, eRes)
						testutil.Equals(t, sExp, sRes)

						remaining = exp.Next()
						testutil.Equals(t, remaining, res.Next())
					}
				}
			}
		})
	})

	return
}

// Regression for: https://github.com/prometheus/tsdb/pull/97
func TestChunkSeriesIterator_DoubleSeek(t *testing.T) {
	chkMetas := []chunks.Meta{
		chunkFromSamples([]sample{}),
		chunkFromSamples([]sample{{1, 1}, {2, 2}, {3, 3}}),
		chunkFromSamples([]sample{{4, 4}, {5, 5}}),
	}

	res := newChunkSeriesIterator(chkMetas, nil, 2, 8)
	testutil.Assert(t, res.Seek(1) == true, "")
	testutil.Assert(t, res.Seek(2) == true, "")
	ts, v := res.At()
	testutil.Equals(t, int64(2), ts)
	testutil.Equals(t, float64(2), v)
}

// Regression when seeked chunks were still found via binary search and we always
// skipped to the end when seeking a value in the current chunk.
func TestChunkSeriesIterator_SeekInCurrentChunk(t *testing.T) {
	metas := []chunks.Meta{
		chunkFromSamples([]sample{}),
		chunkFromSamples([]sample{{1, 2}, {3, 4}, {5, 6}, {7, 8}}),
		chunkFromSamples([]sample{}),
	}

	it := newChunkSeriesIterator(metas, nil, 1, 7)

	testutil.Assert(t, it.Next() == true, "")
	ts, v := it.At()
	testutil.Equals(t, int64(1), ts)
	testutil.Equals(t, float64(2), v)

	testutil.Assert(t, it.Seek(4) == true, "")
	ts, v = it.At()
	testutil.Equals(t, int64(5), ts)
	testutil.Equals(t, float64(6), v)
}

// Regression when calling Next() with a time bounded to fit within two samples.
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
func TestChunkSeriesIterator_NextWithMinTime(t *testing.T) {
	metas := []chunks.Meta{
		chunkFromSamples([]sample{{1, 6}, {5, 6}, {7, 8}}),
	}

	it := newChunkSeriesIterator(metas, nil, 2, 4)
	testutil.Assert(t, it.Next() == false, "")
}

func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
	lbls := []labels.Labels{labels.New(labels.Label{"a", "b"})}
	chunkMetas := [][]chunks.Meta{
		{
			{MinTime: 1, MaxTime: 2, Ref: 1},
			{MinTime: 3, MaxTime: 4, Ref: 2},
			{MinTime: 10, MaxTime: 12, Ref: 3},
		},
	}

	cr := mockChunkReader(
		map[uint64]chunkenc.Chunk{
			1: chunkenc.NewXORChunk(),
			2: chunkenc.NewXORChunk(),
			3: chunkenc.NewXORChunk(),
		},
	)

	m := &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
	p := &populatedChunkSeries{
		set:    m,
		chunks: cr,

		mint: 0,
		maxt: 0,
	}

	testutil.Assert(t, p.Next() == false, "")

	p.mint = 6
	p.maxt = 9
	testutil.Assert(t, p.Next() == false, "")

	// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
	chunkMetas = [][]chunks.Meta{
		{
			{MinTime: 1, MaxTime: 2, Ref: 1},
		},
	}

	m = &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
	p = &populatedChunkSeries{
		set:    m,
		chunks: cr,

		mint: 10,
		maxt: 15,
	}
	testutil.Assert(t, p.Next() == false, "")

	return
}

type mockChunkSeriesSet struct {
	l  []labels.Labels
	cm [][]chunks.Meta

	i int
}

func (m *mockChunkSeriesSet) Next() bool {
	if len(m.l) != len(m.cm) {
		return false
	}
	m.i++
	return m.i < len(m.l)
}

func (m *mockChunkSeriesSet) At() (labels.Labels, []chunks.Meta, Intervals) {
	return m.l[m.i], m.cm[m.i], nil
}

func (m *mockChunkSeriesSet) Err() error {
	return nil
}

// Test the cost of merging series sets for different numbers of merged sets and their size.
// The subsets are all equivalent, so this does not capture merging of partial or non-overlapping sets well.
func BenchmarkMergedSeriesSet(b *testing.B) {
	var sel func(sets []SeriesSet) SeriesSet

	sel = func(sets []SeriesSet) SeriesSet {
		if len(sets) == 0 {
			return EmptySeriesSet()
		}
		if len(sets) == 1 {
			return sets[0]
		}
		l := len(sets) / 2
		return newMergedSeriesSet(sel(sets[:l]), sel(sets[l:]))
	}

	for _, k := range []int{
		100,
		1000,
		10000,
		20000,
	} {
		for _, j := range []int{1, 2, 4, 8, 16, 32} {
			b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
				lbls, err := labels.ReadLabels("testdata/20kseries.json", k)
				testutil.Ok(b, err)

				sort.Sort(labels.Slice(lbls))

				in := make([][]Series, j)

				for _, l := range lbls {
					l2 := l
					for j := range in {
						in[j] = append(in[j], &mockSeries{labels: func() labels.Labels { return l2 }})
					}
				}

				b.ResetTimer()

				for i := 0; i < b.N; i++ {
					var sets []SeriesSet
					for _, s := range in {
						sets = append(sets, newMockSeriesSet(s))
					}
					ms := sel(sets)

					i := 0
					for ms.Next() {
						i++
					}
					testutil.Ok(b, ms.Err())
					testutil.Equals(b, len(lbls), i)
				}
			})
		}
	}
}

type mockChunkReader map[uint64]chunkenc.Chunk

func (cr mockChunkReader) Chunk(id uint64) (chunkenc.Chunk, error) {
	chk, ok := cr[id]
	if ok {
		return chk, nil
	}

	return nil, errors.New("Chunk with ref not found")
}

func (cr mockChunkReader) Close() error {
	return nil
}

func TestDeletedIterator(t *testing.T) {
	chk := chunkenc.NewXORChunk()
	app, err := chk.Appender()
	testutil.Ok(t, err)
	// Insert random values for timestamps 0 through 999.
	act := make([]sample, 1000)
	for i := 0; i < 1000; i++ {
		act[i].t = int64(i)
		act[i].v = rand.Float64()
		app.Append(act[i].t, act[i].v)
	}

	cases := []struct {
		r Intervals
	}{
		{r: Intervals{{1, 20}}},
		{r: Intervals{{1, 10}, {12, 20}, {21, 23}, {25, 30}}},
		{r: Intervals{{1, 10}, {12, 20}, {20, 30}}},
		{r: Intervals{{1, 10}, {12, 23}, {25, 30}}},
		{r: Intervals{{1, 23}, {12, 20}, {25, 30}}},
		{r: Intervals{{1, 23}, {12, 20}, {25, 3000}}},
		{r: Intervals{{0, 2000}}},
		{r: Intervals{{500, 2000}}},
		{r: Intervals{{0, 200}}},
		{r: Intervals{{1000, 20000}}},
	}

	for _, c := range cases {
		i := int64(-1)
		it := &deletedIterator{it: chk.Iterator(), intervals: c.r[:]}
		ranges := c.r[:]
		for it.Next() {
			i++
			for _, tr := range ranges {
				if tr.inBounds(i) {
					i = tr.Maxt + 1
					ranges = ranges[1:]
				}
			}

			testutil.Assert(t, i < 1000, "")

			ts, v := it.At()
			testutil.Equals(t, act[i].t, ts)
			testutil.Equals(t, act[i].v, v)
		}
		// There has been an extra call to Next().
		i++
		for _, tr := range ranges {
			if tr.inBounds(i) {
				i = tr.Maxt + 1
				ranges = ranges[1:]
			}
		}

		testutil.Assert(t, i >= 1000, "")
		testutil.Ok(t, it.Err())
	}
}

type series struct {
	l      labels.Labels
	chunks []chunks.Meta
}

type mockIndex struct {
	series     map[uint64]series
	labelIndex map[string][]string
	postings   map[labels.Label][]uint64
	symbols    map[string]struct{}
}

func newMockIndex() mockIndex {
	ix := mockIndex{
		series:     make(map[uint64]series),
		labelIndex: make(map[string][]string),
		postings:   make(map[labels.Label][]uint64),
		symbols:    make(map[string]struct{}),
	}
	return ix
}

func (m mockIndex) Symbols() (map[string]struct{}, error) {
	return m.symbols, nil
}

func (m mockIndex) AddSeries(ref uint64, l labels.Labels, chunks ...chunks.Meta) error {
	if _, ok := m.series[ref]; ok {
		return errors.Errorf("series with reference %d already added", ref)
	}
	for _, lbl := range l {
		m.symbols[lbl.Name] = struct{}{}
		m.symbols[lbl.Value] = struct{}{}
	}

	s := series{l: l}
	// Actual chunk data is not stored in the index.
	for _, c := range chunks {
		c.Chunk = nil
		s.chunks = append(s.chunks, c)
	}
	m.series[ref] = s

	return nil
}

func (m mockIndex) WriteLabelIndex(names []string, values []string) error {
	// TODO support composite indexes
	if len(names) != 1 {
		return errors.New("composite indexes not supported yet")
	}

	sort.Strings(values)
	m.labelIndex[names[0]] = values
	return nil
}

func (m mockIndex) WritePostings(name, value string, it index.Postings) error {
	l := labels.Label{Name: name, Value: value}
	if _, ok := m.postings[l]; ok {
		return errors.Errorf("postings for %s already added", l)
	}
	ep, err := index.ExpandPostings(it)
	if err != nil {
		return err
	}
	m.postings[l] = ep
	return nil
}

func (m mockIndex) Close() error {
	return nil
}

func (m mockIndex) LabelValues(names ...string) (index.StringTuples, error) {
	// TODO support composite indexes
	if len(names) != 1 {
		return nil, errors.New("composite indexes not supported yet")
	}

	return index.NewStringTuples(m.labelIndex[names[0]], 1)
}

func (m mockIndex) Postings(name, value string) (index.Postings, error) {
	l := labels.Label{Name: name, Value: value}
	return index.NewListPostings(m.postings[l]), nil
}

func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
	ep, err := index.ExpandPostings(p)
	if err != nil {
		return index.ErrPostings(errors.Wrap(err, "expand postings"))
	}

	sort.Slice(ep, func(i, j int) bool {
		return labels.Compare(m.series[ep[i]].l, m.series[ep[j]].l) < 0
	})
	return index.NewListPostings(ep)
}

func (m mockIndex) Series(ref uint64, lset *labels.Labels, chks *[]chunks.Meta) error {
	s, ok := m.series[ref]
	if !ok {
		return ErrNotFound
	}
	*lset = append((*lset)[:0], s.l...)
	*chks = append((*chks)[:0], s.chunks...)

	return nil
}

func (m mockIndex) LabelIndices() ([][]string, error) {
	res := make([][]string, 0, len(m.labelIndex))

	for k := range m.labelIndex {
		res = append(res, []string{k})
	}
	return res, nil
}