2017-04-10 20:59:45 +02:00
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
2016-12-14 15:47:05 +01:00
package tsdb
import (
2017-07-05 16:19:28 +02:00
"fmt"
2018-10-23 15:35:52 -06:00
"io/ioutil"
2017-04-14 00:36:14 +05:30
"math"
2017-04-13 19:57:31 +05:30
"math/rand"
2018-10-23 15:35:52 -06:00
"os"
2018-10-25 05:32:57 -04:00
"path/filepath"
2016-12-19 11:44:11 +01:00
"sort"
2019-06-07 15:41:44 +02:00
"strconv"
2016-12-14 15:47:05 +01:00
"testing"
2017-11-30 15:34:49 +01:00
"github.com/pkg/errors"
"github.com/prometheus/tsdb/chunkenc"
2017-04-09 19:30:25 +05:30
"github.com/prometheus/tsdb/chunks"
2017-11-30 15:34:49 +01:00
"github.com/prometheus/tsdb/index"
2017-04-04 11:27:26 +02:00
"github.com/prometheus/tsdb/labels"
2017-12-06 17:06:14 -08:00
"github.com/prometheus/tsdb/testutil"
2018-10-25 21:06:19 +01:00
"github.com/prometheus/tsdb/tsdbutil"
2016-12-14 15:47:05 +01:00
)
2018-09-21 11:07:35 +03:00
type mockSeriesSet struct {
next func ( ) bool
series func ( ) Series
err func ( ) error
}
func ( m * mockSeriesSet ) Next ( ) bool { return m . next ( ) }
func ( m * mockSeriesSet ) At ( ) Series { return m . series ( ) }
func ( m * mockSeriesSet ) Err ( ) error { return m . err ( ) }
// newMockSeriesSet wraps a fixed slice of series into a SeriesSet that
// yields them in order and never reports an error.
func newMockSeriesSet(list []Series) *mockSeriesSet {
	idx := -1
	return &mockSeriesSet{
		next: func() bool {
			idx++
			return idx < len(list)
		},
		series: func() Series { return list[idx] },
		err:    func() error { return nil },
	}
}
2017-03-14 15:24:08 +01:00
func TestMergedSeriesSet ( t * testing . T ) {
2016-12-19 11:44:11 +01:00
cases := [ ] struct {
// The input sets in order (samples in series in b are strictly
// after those in a).
a , b SeriesSet
2017-01-06 08:08:02 +01:00
// The composition of a and b in the partition series set must yield
2016-12-19 11:44:11 +01:00
// results equivalent to the result series set.
exp SeriesSet
} {
{
2018-09-21 11:07:35 +03:00
a : newMockSeriesSet ( [ ] Series {
2016-12-19 11:44:11 +01:00
newSeries ( map [ string ] string {
"a" : "a" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
2016-12-19 11:44:11 +01:00
} ) ,
} ) ,
2018-09-21 11:07:35 +03:00
b : newMockSeriesSet ( [ ] Series {
2016-12-19 11:44:11 +01:00
newSeries ( map [ string ] string {
"a" : "a" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 2 , v : 2 } ,
2016-12-19 11:44:11 +01:00
} ) ,
newSeries ( map [ string ] string {
"b" : "b" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
2016-12-19 11:44:11 +01:00
} ) ,
} ) ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2016-12-19 11:44:11 +01:00
newSeries ( map [ string ] string {
"a" : "a" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
sample { t : 2 , v : 2 } ,
2016-12-19 11:44:11 +01:00
} ) ,
newSeries ( map [ string ] string {
"b" : "b" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
2016-12-19 11:44:11 +01:00
} ) ,
} ) ,
} ,
2017-01-03 19:02:42 +01:00
{
2018-09-21 11:07:35 +03:00
a : newMockSeriesSet ( [ ] Series {
2017-01-03 19:02:42 +01:00
newSeries ( map [ string ] string {
"handler" : "prometheus" ,
"instance" : "127.0.0.1:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
2017-01-03 19:02:42 +01:00
} ) ,
newSeries ( map [ string ] string {
"handler" : "prometheus" ,
"instance" : "localhost:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 2 } ,
2017-01-03 19:02:42 +01:00
} ) ,
} ) ,
2018-09-21 11:07:35 +03:00
b : newMockSeriesSet ( [ ] Series {
2017-01-03 19:02:42 +01:00
newSeries ( map [ string ] string {
"handler" : "prometheus" ,
"instance" : "127.0.0.1:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 2 , v : 1 } ,
2017-01-03 19:02:42 +01:00
} ) ,
newSeries ( map [ string ] string {
"handler" : "query" ,
"instance" : "localhost:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 2 , v : 2 } ,
2017-01-03 19:02:42 +01:00
} ) ,
} ) ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2017-01-03 19:02:42 +01:00
newSeries ( map [ string ] string {
"handler" : "prometheus" ,
"instance" : "127.0.0.1:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 1 } ,
sample { t : 2 , v : 1 } ,
2017-01-03 19:02:42 +01:00
} ) ,
newSeries ( map [ string ] string {
"handler" : "prometheus" ,
"instance" : "localhost:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 1 , v : 2 } ,
2017-01-03 19:02:42 +01:00
} ) ,
newSeries ( map [ string ] string {
"handler" : "query" ,
"instance" : "localhost:9090" ,
2019-01-28 14:24:49 +03:00
} , [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { t : 2 , v : 2 } ,
2017-01-03 19:02:42 +01:00
} ) ,
} ) ,
} ,
2016-12-19 11:44:11 +01:00
}
Outer :
for _ , c := range cases {
2017-03-14 15:24:08 +01:00
res := newMergedSeriesSet ( c . a , c . b )
2016-12-19 11:44:11 +01:00
for {
eok , rok := c . exp . Next ( ) , res . Next ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , eok , rok )
2016-12-19 11:44:11 +01:00
if ! eok {
continue Outer
}
2017-01-02 13:27:52 +01:00
sexp := c . exp . At ( )
sres := res . At ( )
2016-12-19 11:44:11 +01:00
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
2016-12-19 11:44:11 +01:00
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
2016-12-19 11:44:11 +01:00
}
}
}
2019-02-14 18:59:41 +05:30
func expandSeriesIterator ( it SeriesIterator ) ( r [ ] tsdbutil . Sample , err error ) {
2016-12-19 11:44:11 +01:00
for it . Next ( ) {
2017-01-02 13:27:52 +01:00
t , v := it . At ( )
2016-12-19 11:44:11 +01:00
r = append ( r , sample { t : t , v : v } )
}
return r , it . Err ( )
}
2017-04-09 19:30:25 +05:30
2018-10-12 10:45:19 +01:00
type seriesSamples struct {
2017-04-13 19:57:31 +05:30
lset map [ string ] string
chunks [ ] [ ] sample
2018-10-12 10:45:19 +01:00
}
// Index: labels -> postings -> chunkMetas -> chunkRef
// ChunkReader: ref -> vals
2019-04-25 13:07:04 +03:00
func createIdxChkReaders ( t * testing . T , tc [ ] seriesSamples ) ( IndexReader , ChunkReader , int64 , int64 ) {
2017-04-13 19:57:31 +05:30
sort . Slice ( tc , func ( i , j int ) bool {
return labels . Compare ( labels . FromMap ( tc [ i ] . lset ) , labels . FromMap ( tc [ i ] . lset ) ) < 0
} )
2017-11-30 15:34:49 +01:00
postings := index . NewMemPostings ( )
chkReader := mockChunkReader ( make ( map [ uint64 ] chunkenc . Chunk ) )
2017-07-21 01:37:52 -07:00
lblIdx := make ( map [ string ] stringset )
2017-04-13 19:57:31 +05:30
mi := newMockIndex ( )
2019-02-14 18:59:41 +05:30
blockMint := int64 ( math . MaxInt64 )
blockMaxt := int64 ( math . MinInt64 )
2017-04-13 19:57:31 +05:30
2019-04-18 21:11:39 +08:00
var chunkRef uint64
2017-04-13 19:57:31 +05:30
for i , s := range tc {
2017-05-22 13:31:57 +05:30
i = i + 1 // 0 is not a valid posting.
2017-11-30 15:34:49 +01:00
metas := make ( [ ] chunks . Meta , 0 , len ( s . chunks ) )
2017-04-13 19:57:31 +05:30
for _ , chk := range s . chunks {
2019-02-14 18:59:41 +05:30
if chk [ 0 ] . t < blockMint {
blockMint = chk [ 0 ] . t
}
if chk [ len ( chk ) - 1 ] . t > blockMaxt {
blockMaxt = chk [ len ( chk ) - 1 ] . t
}
2017-11-30 15:34:49 +01:00
metas = append ( metas , chunks . Meta {
2017-04-13 19:57:31 +05:30
MinTime : chk [ 0 ] . t ,
MaxTime : chk [ len ( chk ) - 1 ] . t ,
2019-04-18 21:11:39 +08:00
Ref : chunkRef ,
2017-04-13 19:57:31 +05:30
} )
2017-11-30 15:34:49 +01:00
chunk := chunkenc . NewXORChunk ( )
2017-04-13 19:57:31 +05:30
app , _ := chunk . Appender ( )
for _ , smpl := range chk {
app . Append ( smpl . t , smpl . v )
}
2019-04-18 21:11:39 +08:00
chkReader [ chunkRef ] = chunk
chunkRef += 1
2017-04-13 19:57:31 +05:30
}
2017-07-21 01:37:52 -07:00
ls := labels . FromMap ( s . lset )
2019-04-25 13:07:04 +03:00
testutil . Ok ( t , mi . AddSeries ( uint64 ( i ) , ls , metas ... ) )
2017-04-13 19:57:31 +05:30
2017-11-30 15:34:49 +01:00
postings . Add ( uint64 ( i ) , ls )
2017-07-21 01:37:52 -07:00
2017-09-05 11:45:18 +02:00
for _ , l := range ls {
2017-07-21 01:37:52 -07:00
vs , present := lblIdx [ l . Name ]
if ! present {
vs = stringset { }
lblIdx [ l . Name ] = vs
}
vs . set ( l . Value )
2017-04-13 19:57:31 +05:30
}
}
2017-07-21 01:37:52 -07:00
for l , vs := range lblIdx {
2019-04-25 13:07:04 +03:00
testutil . Ok ( t , mi . WriteLabelIndex ( [ ] string { l } , vs . slice ( ) ) )
2017-07-21 01:37:52 -07:00
}
2019-04-25 13:07:04 +03:00
testutil . Ok ( t , postings . Iter ( func ( l labels . Label , p index . Postings ) error {
2017-11-30 15:34:49 +01:00
return mi . WritePostings ( l . Name , l . Value , p )
2019-04-25 13:07:04 +03:00
} ) )
2017-04-13 19:57:31 +05:30
2019-02-14 18:59:41 +05:30
return mi , chkReader , blockMint , blockMaxt
2017-04-13 19:57:31 +05:30
}
func TestBlockQuerier ( t * testing . T ) {
2019-01-28 14:24:49 +03:00
newSeries := func ( l map [ string ] string , s [ ] tsdbutil . Sample ) Series {
2017-04-14 00:36:14 +05:30
return & mockSeries {
labels : func ( ) labels . Labels { return labels . FromMap ( l ) } ,
iterator : func ( ) SeriesIterator { return newListSeriesIterator ( s ) } ,
}
}
2017-04-13 19:57:31 +05:30
2017-04-22 01:38:26 +05:30
type query struct {
mint , maxt int64
ms [ ] labels . Matcher
exp SeriesSet
}
cases := struct {
2018-10-12 10:45:19 +01:00
data [ ] seriesSamples
2017-04-22 01:38:26 +05:30
queries [ ] query
2017-04-13 19:57:31 +05:30
} {
2018-10-12 10:45:19 +01:00
data : [ ] seriesSamples {
2017-04-13 19:57:31 +05:30
{
lset : map [ string ] string {
"a" : "a" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 2 } , { 2 , 3 } , { 3 , 4 } ,
} ,
{
{ 5 , 2 } , { 6 , 3 } , { 7 , 4 } ,
} ,
} ,
} ,
{
lset : map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 1 } , { 2 , 2 } , { 3 , 3 } ,
} ,
{
2017-04-14 00:36:14 +05:30
{ 5 , 3 } , { 6 , 6 } ,
2017-04-13 19:57:31 +05:30
} ,
} ,
} ,
{
lset : map [ string ] string {
"b" : "b" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 3 } , { 2 , 2 } , { 3 , 6 } ,
} ,
{
{ 5 , 1 } , { 6 , 7 } , { 7 , 2 } ,
} ,
} ,
} ,
} ,
2017-04-22 01:38:26 +05:30
queries : [ ] query {
{
mint : 0 ,
maxt : 0 ,
ms : [ ] labels . Matcher { } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series { } ) ,
2017-04-22 01:38:26 +05:30
} ,
{
mint : 0 ,
maxt : 0 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series { } ) ,
2017-04-22 01:38:26 +05:30
} ,
{
mint : 1 ,
maxt : 0 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series { } ) ,
2017-04-22 01:38:26 +05:30
} ,
{
mint : 2 ,
maxt : 6 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2017-04-22 01:38:26 +05:30
newSeries ( map [ string ] string {
"a" : "a" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 2 , 3 } , sample { 3 , 4 } , sample { 5 , 2 } , sample { 6 , 3 } } ,
2017-04-22 01:38:26 +05:30
) ,
newSeries ( map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 2 , 2 } , sample { 3 , 3 } , sample { 5 , 3 } , sample { 6 , 6 } } ,
2017-04-22 01:38:26 +05:30
) ,
} ) ,
} ,
2017-04-14 00:36:14 +05:30
} ,
2017-04-13 19:57:31 +05:30
}
Outer :
2017-12-06 16:27:09 -08:00
for _ , c := range cases . queries {
2019-04-25 13:07:04 +03:00
ir , cr , _ , _ := createIdxChkReaders ( t , cases . data )
2017-04-13 19:57:31 +05:30
querier := & blockQuerier {
2017-05-17 14:49:42 +05:30
index : ir ,
chunks : cr ,
2018-11-14 18:40:01 +02:00
tombstones : newMemTombstones ( ) ,
2017-04-13 19:57:31 +05:30
mint : c . mint ,
maxt : c . maxt ,
}
2017-11-13 12:16:58 +01:00
res , err := querier . Select ( c . ms ... )
2017-12-06 17:06:14 -08:00
testutil . Ok ( t , err )
2017-04-13 19:57:31 +05:30
for {
eok , rok := c . exp . Next ( ) , res . Next ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , eok , rok )
2017-04-13 19:57:31 +05:30
if ! eok {
continue Outer
}
sexp := c . exp . At ( )
sres := res . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
2017-04-13 19:57:31 +05:30
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
2017-04-13 19:57:31 +05:30
}
}
}
2017-04-09 19:30:25 +05:30
2017-05-22 13:31:57 +05:30
func TestBlockQuerierDelete ( t * testing . T ) {
2019-01-28 14:24:49 +03:00
newSeries := func ( l map [ string ] string , s [ ] tsdbutil . Sample ) Series {
2017-05-22 13:31:57 +05:30
return & mockSeries {
labels : func ( ) labels . Labels { return labels . FromMap ( l ) } ,
iterator : func ( ) SeriesIterator { return newListSeriesIterator ( s ) } ,
}
}
type query struct {
mint , maxt int64
ms [ ] labels . Matcher
exp SeriesSet
}
cases := struct {
2018-10-12 10:45:19 +01:00
data [ ] seriesSamples
2017-05-22 13:31:57 +05:30
2017-11-13 13:32:24 +01:00
tombstones TombstoneReader
2017-05-22 13:31:57 +05:30
queries [ ] query
} {
2018-10-12 10:45:19 +01:00
data : [ ] seriesSamples {
2017-05-22 13:31:57 +05:30
{
lset : map [ string ] string {
"a" : "a" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 2 } , { 2 , 3 } , { 3 , 4 } ,
} ,
{
{ 5 , 2 } , { 6 , 3 } , { 7 , 4 } ,
} ,
} ,
} ,
{
lset : map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 1 } , { 2 , 2 } , { 3 , 3 } ,
} ,
{
{ 4 , 15 } , { 5 , 3 } , { 6 , 6 } ,
} ,
} ,
} ,
{
lset : map [ string ] string {
"b" : "b" ,
} ,
chunks : [ ] [ ] sample {
{
{ 1 , 3 } , { 2 , 2 } , { 3 , 6 } ,
} ,
{
{ 5 , 1 } , { 6 , 7 } , { 7 , 2 } ,
} ,
} ,
} ,
} ,
2018-07-10 21:24:13 +08:00
tombstones : & memTombstones { intvlGroups : map [ uint64 ] Intervals {
2018-07-06 20:30:27 +08:00
1 : Intervals { { 1 , 3 } } ,
2 : Intervals { { 1 , 3 } , { 6 , 10 } } ,
3 : Intervals { { 6 , 10 } } ,
} } ,
2017-05-22 13:31:57 +05:30
queries : [ ] query {
{
mint : 2 ,
maxt : 7 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2017-05-22 13:31:57 +05:30
newSeries ( map [ string ] string {
"a" : "a" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 5 , 2 } , sample { 6 , 3 } , sample { 7 , 4 } } ,
2017-05-22 13:31:57 +05:30
) ,
newSeries ( map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 4 , 15 } , sample { 5 , 3 } } ,
2017-05-22 13:31:57 +05:30
) ,
} ) ,
} ,
{
mint : 2 ,
maxt : 7 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "b" , "b" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2017-05-22 13:31:57 +05:30
newSeries ( map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 4 , 15 } , sample { 5 , 3 } } ,
2017-05-22 13:31:57 +05:30
) ,
newSeries ( map [ string ] string {
"b" : "b" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 2 , 2 } , sample { 3 , 6 } , sample { 5 , 1 } } ,
2017-05-22 13:31:57 +05:30
) ,
} ) ,
} ,
{
mint : 1 ,
maxt : 4 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series {
2017-05-22 13:31:57 +05:30
newSeries ( map [ string ] string {
"a" : "a" ,
"b" : "b" ,
} ,
2019-01-28 14:24:49 +03:00
[ ] tsdbutil . Sample { sample { 4 , 15 } } ,
2017-05-22 13:31:57 +05:30
) ,
} ) ,
} ,
{
mint : 1 ,
maxt : 3 ,
ms : [ ] labels . Matcher { labels . NewEqualMatcher ( "a" , "a" ) } ,
2018-09-21 11:07:35 +03:00
exp : newMockSeriesSet ( [ ] Series { } ) ,
2017-05-22 13:31:57 +05:30
} ,
} ,
}
Outer :
for _ , c := range cases . queries {
2019-04-25 13:07:04 +03:00
ir , cr , _ , _ := createIdxChkReaders ( t , cases . data )
2017-05-22 13:31:57 +05:30
querier := & blockQuerier {
index : ir ,
chunks : cr ,
2017-05-24 11:24:24 +05:30
tombstones : cases . tombstones ,
2017-05-22 13:31:57 +05:30
mint : c . mint ,
maxt : c . maxt ,
}
2017-11-13 12:16:58 +01:00
res , err := querier . Select ( c . ms ... )
2017-12-06 17:06:14 -08:00
testutil . Ok ( t , err )
2017-05-22 13:31:57 +05:30
for {
eok , rok := c . exp . Next ( ) , res . Next ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , eok , rok )
2017-05-22 13:31:57 +05:30
if ! eok {
continue Outer
}
sexp := c . exp . At ( )
sres := res . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
2017-05-22 13:31:57 +05:30
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
2017-05-22 13:31:57 +05:30
}
}
}
2017-04-09 19:30:25 +05:30
func TestBaseChunkSeries ( t * testing . T ) {
type refdSeries struct {
lset labels . Labels
2017-11-30 15:34:49 +01:00
chunks [ ] chunks . Meta
2017-04-09 19:30:25 +05:30
2017-09-04 16:08:38 +02:00
ref uint64
2017-04-09 19:30:25 +05:30
}
cases := [ ] struct {
series [ ] refdSeries
// Postings should be in the sorted order of the the series
2017-09-04 16:08:38 +02:00
postings [ ] uint64
2017-04-09 19:30:25 +05:30
expIdxs [ ] int
} {
{
series : [ ] refdSeries {
{
lset : labels . New ( [ ] labels . Label { { "a" , "a" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta {
2017-04-09 19:30:25 +05:30
{ Ref : 29 } , { Ref : 45 } , { Ref : 245 } , { Ref : 123 } , { Ref : 4232 } , { Ref : 5344 } ,
{ Ref : 121 } ,
} ,
ref : 12 ,
} ,
{
lset : labels . New ( [ ] labels . Label { { "a" , "a" } , { "b" , "b" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta {
2017-04-09 19:30:25 +05:30
{ Ref : 82 } , { Ref : 23 } , { Ref : 234 } , { Ref : 65 } , { Ref : 26 } ,
} ,
ref : 10 ,
} ,
{
lset : labels . New ( [ ] labels . Label { { "b" , "c" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta { { Ref : 8282 } } ,
2017-04-09 19:30:25 +05:30
ref : 1 ,
} ,
{
lset : labels . New ( [ ] labels . Label { { "b" , "b" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta {
2017-04-09 19:30:25 +05:30
{ Ref : 829 } , { Ref : 239 } , { Ref : 2349 } , { Ref : 659 } , { Ref : 269 } ,
} ,
ref : 108 ,
} ,
} ,
2017-10-11 09:33:35 +02:00
postings : [ ] uint64 { 12 , 13 , 10 , 108 } , // 13 doesn't exist and should just be skipped over.
expIdxs : [ ] int { 0 , 1 , 3 } ,
2017-04-09 19:30:25 +05:30
} ,
{
series : [ ] refdSeries {
{
lset : labels . New ( [ ] labels . Label { { "a" , "a" } , { "b" , "b" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta {
2017-04-09 19:30:25 +05:30
{ Ref : 82 } , { Ref : 23 } , { Ref : 234 } , { Ref : 65 } , { Ref : 26 } ,
} ,
ref : 10 ,
} ,
{
lset : labels . New ( [ ] labels . Label { { "b" , "c" } } ... ) ,
2017-11-30 15:34:49 +01:00
chunks : [ ] chunks . Meta { { Ref : 8282 } } ,
2017-10-11 09:33:35 +02:00
ref : 3 ,
2017-04-09 19:30:25 +05:30
} ,
} ,
2017-09-04 16:08:38 +02:00
postings : [ ] uint64 { } ,
2017-10-11 09:33:35 +02:00
expIdxs : [ ] int { } ,
2017-04-09 19:30:25 +05:30
} ,
}
for _ , tc := range cases {
mi := newMockIndex ( )
for _ , s := range tc . series {
2019-04-25 13:07:04 +03:00
testutil . Ok ( t , mi . AddSeries ( s . ref , s . lset , s . chunks ... ) )
2017-04-09 19:30:25 +05:30
}
bcs := & baseChunkSeries {
2017-11-30 15:34:49 +01:00
p : index . NewListPostings ( tc . postings ) ,
2017-05-17 14:49:42 +05:30
index : mi ,
2018-11-14 18:40:01 +02:00
tombstones : newMemTombstones ( ) ,
2017-04-09 19:30:25 +05:30
}
i := 0
for bcs . Next ( ) {
2017-05-17 14:49:42 +05:30
lset , chks , _ := bcs . At ( )
2017-04-09 19:30:25 +05:30
idx := tc . expIdxs [ i ]
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , tc . series [ idx ] . lset , lset )
testutil . Equals ( t , tc . series [ idx ] . chunks , chks )
2017-04-09 19:30:25 +05:30
i ++
}
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , len ( tc . expIdxs ) , i )
testutil . Ok ( t , bcs . Err ( ) )
2017-04-09 19:30:25 +05:30
}
}
// itSeries adapts a bare SeriesIterator into a Series with an empty label set.
// TODO: Remove after simpleSeries is merged.
type itSeries struct {
	si SeriesIterator
}

func (s itSeries) Iterator() SeriesIterator { return s.si }
func (s itSeries) Labels() labels.Labels    { return labels.Labels{} }
func TestSeriesIterator ( t * testing . T ) {
itcases := [ ] struct {
2019-01-28 14:24:49 +03:00
a , b , c [ ] tsdbutil . Sample
exp [ ] tsdbutil . Sample
2017-04-14 00:37:21 +05:30
mint , maxt int64
2017-04-09 19:30:25 +05:30
} {
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample { } ,
b : [ ] tsdbutil . Sample { } ,
c : [ ] tsdbutil . Sample { } ,
2017-04-09 19:30:25 +05:30
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample { } ,
2017-04-14 00:37:21 +05:30
mint : math . MinInt64 ,
maxt : math . MaxInt64 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } ,
sample { 2 , 3 } ,
sample { 3 , 5 } ,
sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample { } ,
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } , sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : math . MinInt64 ,
maxt : math . MaxInt64 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample { } ,
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } , sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : 2 ,
maxt : 8 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } , sample { 7 , 89 } , sample { 9 , 8 } , sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : 6 ,
maxt : 10 ,
2017-04-09 19:30:25 +05:30
} ,
}
seekcases := [ ] struct {
2019-01-28 14:24:49 +03:00
a , b , c [ ] tsdbutil . Sample
2017-04-09 19:30:25 +05:30
seek int64
success bool
2019-01-28 14:24:49 +03:00
exp [ ] tsdbutil . Sample
2017-04-14 00:37:21 +05:30
mint , maxt int64
2017-04-09 19:30:25 +05:30
} {
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample { } ,
b : [ ] tsdbutil . Sample { } ,
c : [ ] tsdbutil . Sample { } ,
2017-04-09 19:30:25 +05:30
seek : 0 ,
success : false ,
exp : nil ,
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 2 , 3 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample { } ,
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
seek : 10 ,
success : false ,
exp : nil ,
2017-04-14 00:37:21 +05:30
mint : math . MinInt64 ,
maxt : math . MaxInt64 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample { } ,
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 1 , 2 } , sample { 3 , 5 } , sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
seek : 2 ,
success : true ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 3 , 5 } , sample { 6 , 1 } , sample { 7 , 89 } , sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : 5 ,
maxt : 8 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
seek : 10 ,
success : true ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : 10 ,
maxt : 203 ,
2017-04-09 19:30:25 +05:30
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 6 , 1 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 9 , 8 } ,
2017-04-09 19:30:25 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
seek : 203 ,
success : true ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 203 , 3493 } ,
2017-04-09 19:30:25 +05:30
} ,
2017-04-14 00:37:21 +05:30
mint : 7 ,
maxt : 203 ,
2017-04-09 19:30:25 +05:30
} ,
}
t . Run ( "Chunk" , func ( t * testing . T ) {
for _ , tc := range itcases {
2017-11-30 15:34:49 +01:00
chkMetas := [ ] chunks . Meta {
2018-10-25 21:06:19 +01:00
tsdbutil . ChunkFromSamples ( tc . a ) ,
tsdbutil . ChunkFromSamples ( tc . b ) ,
tsdbutil . ChunkFromSamples ( tc . c ) ,
2017-04-09 19:30:25 +05:30
}
2017-05-22 16:42:36 +05:30
res := newChunkSeriesIterator ( chkMetas , nil , tc . mint , tc . maxt )
2017-04-14 00:37:21 +05:30
2019-01-28 14:24:49 +03:00
smplValid := make ( [ ] tsdbutil . Sample , 0 )
2017-04-14 00:37:21 +05:30
for _ , s := range tc . exp {
2018-10-25 21:06:19 +01:00
if s . T ( ) >= tc . mint && s . T ( ) <= tc . maxt {
2019-01-28 14:24:49 +03:00
smplValid = append ( smplValid , tsdbutil . Sample ( s ) )
2017-04-14 00:37:21 +05:30
}
}
exp := newListSeriesIterator ( smplValid )
2017-04-09 19:30:25 +05:30
smplExp , errExp := expandSeriesIterator ( exp )
smplRes , errRes := expandSeriesIterator ( res )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
2017-04-09 19:30:25 +05:30
}
t . Run ( "Seek" , func ( t * testing . T ) {
2017-04-14 00:37:21 +05:30
extra := [ ] struct {
2019-01-28 14:24:49 +03:00
a , b , c [ ] tsdbutil . Sample
2017-04-14 00:37:21 +05:30
seek int64
success bool
2019-01-28 14:24:49 +03:00
exp [ ] tsdbutil . Sample
2017-04-14 00:37:21 +05:30
mint , maxt int64
} {
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 6 , 1 } ,
2017-04-14 00:37:21 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 9 , 8 } ,
2017-04-14 00:37:21 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-14 00:37:21 +05:30
} ,
seek : 203 ,
success : false ,
exp : nil ,
mint : 2 ,
maxt : 202 ,
} ,
{
2019-01-28 14:24:49 +03:00
a : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 6 , 1 } ,
2017-04-14 00:37:21 +05:30
} ,
2019-01-28 14:24:49 +03:00
b : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 9 , 8 } ,
2017-04-14 00:37:21 +05:30
} ,
2019-01-28 14:24:49 +03:00
c : [ ] tsdbutil . Sample {
2018-10-25 21:06:19 +01:00
sample { 10 , 22 } , sample { 203 , 3493 } ,
2017-04-14 00:37:21 +05:30
} ,
seek : 5 ,
success : true ,
2019-01-28 14:24:49 +03:00
exp : [ ] tsdbutil . Sample { sample { 10 , 22 } } ,
2017-04-14 00:37:21 +05:30
mint : 10 ,
maxt : 202 ,
} ,
}
seekcases2 := append ( seekcases , extra ... )
for _ , tc := range seekcases2 {
2017-11-30 15:34:49 +01:00
chkMetas := [ ] chunks . Meta {
2018-10-25 21:06:19 +01:00
tsdbutil . ChunkFromSamples ( tc . a ) ,
tsdbutil . ChunkFromSamples ( tc . b ) ,
tsdbutil . ChunkFromSamples ( tc . c ) ,
2017-04-09 19:30:25 +05:30
}
2017-05-22 16:42:36 +05:30
res := newChunkSeriesIterator ( chkMetas , nil , tc . mint , tc . maxt )
2017-04-14 00:37:21 +05:30
2019-01-28 14:24:49 +03:00
smplValid := make ( [ ] tsdbutil . Sample , 0 )
2017-04-14 00:37:21 +05:30
for _ , s := range tc . exp {
2018-10-25 21:06:19 +01:00
if s . T ( ) >= tc . mint && s . T ( ) <= tc . maxt {
2019-01-28 14:24:49 +03:00
smplValid = append ( smplValid , tsdbutil . Sample ( s ) )
2017-04-14 00:37:21 +05:30
}
}
exp := newListSeriesIterator ( smplValid )
2017-04-09 19:30:25 +05:30
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , tc . success , res . Seek ( tc . seek ) )
2017-04-09 19:30:25 +05:30
if tc . success {
// Init the list and then proceed to check.
remaining := exp . Next ( )
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , remaining == true , "" )
2017-04-09 19:30:25 +05:30
for remaining {
sExp , eExp := exp . At ( )
sRes , eRes := res . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , eExp , eRes )
testutil . Equals ( t , sExp , sRes )
2017-04-09 19:30:25 +05:30
remaining = exp . Next ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , remaining , res . Next ( ) )
2017-04-09 19:30:25 +05:30
}
}
}
} )
} )
t . Run ( "Chain" , func ( t * testing . T ) {
2019-02-14 18:59:41 +05:30
// Extra cases for overlapping series.
itcasesExtra := [ ] struct {
a , b , c [ ] tsdbutil . Sample
exp [ ] tsdbutil . Sample
mint , maxt int64
} {
{
a : [ ] tsdbutil . Sample {
sample { 1 , 2 } , sample { 2 , 3 } , sample { 3 , 5 } , sample { 6 , 1 } ,
} ,
b : [ ] tsdbutil . Sample {
sample { 5 , 49 } , sample { 7 , 89 } , sample { 9 , 8 } ,
} ,
c : [ ] tsdbutil . Sample {
sample { 2 , 33 } , sample { 4 , 44 } , sample { 10 , 3 } ,
} ,
exp : [ ] tsdbutil . Sample {
sample { 1 , 2 } , sample { 2 , 33 } , sample { 3 , 5 } , sample { 4 , 44 } , sample { 5 , 49 } , sample { 6 , 1 } , sample { 7 , 89 } , sample { 9 , 8 } , sample { 10 , 3 } ,
} ,
mint : math . MinInt64 ,
maxt : math . MaxInt64 ,
} ,
{
a : [ ] tsdbutil . Sample {
sample { 1 , 2 } , sample { 2 , 3 } , sample { 9 , 5 } , sample { 13 , 1 } ,
} ,
b : [ ] tsdbutil . Sample { } ,
c : [ ] tsdbutil . Sample {
sample { 1 , 23 } , sample { 2 , 342 } , sample { 3 , 25 } , sample { 6 , 11 } ,
} ,
exp : [ ] tsdbutil . Sample {
sample { 1 , 23 } , sample { 2 , 342 } , sample { 3 , 25 } , sample { 6 , 11 } , sample { 9 , 5 } , sample { 13 , 1 } ,
} ,
mint : math . MinInt64 ,
maxt : math . MaxInt64 ,
} ,
}
2017-04-09 19:30:25 +05:30
for _ , tc := range itcases {
a , b , c := itSeries { newListSeriesIterator ( tc . a ) } ,
itSeries { newListSeriesIterator ( tc . b ) } ,
itSeries { newListSeriesIterator ( tc . c ) }
res := newChainedSeriesIterator ( a , b , c )
2019-01-28 14:24:49 +03:00
exp := newListSeriesIterator ( [ ] tsdbutil . Sample ( tc . exp ) )
2017-04-09 19:30:25 +05:30
smplExp , errExp := expandSeriesIterator ( exp )
smplRes , errRes := expandSeriesIterator ( res )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
2017-04-09 19:30:25 +05:30
}
2019-02-14 18:59:41 +05:30
for _ , tc := range append ( itcases , itcasesExtra ... ) {
a , b , c := itSeries { newListSeriesIterator ( tc . a ) } ,
itSeries { newListSeriesIterator ( tc . b ) } ,
itSeries { newListSeriesIterator ( tc . c ) }
res := newVerticalMergeSeriesIterator ( a , b , c )
exp := newListSeriesIterator ( [ ] tsdbutil . Sample ( tc . exp ) )
smplExp , errExp := expandSeriesIterator ( exp )
smplRes , errRes := expandSeriesIterator ( res )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
}
2017-04-09 19:30:25 +05:30
t . Run ( "Seek" , func ( t * testing . T ) {
for _ , tc := range seekcases {
2019-02-14 18:59:41 +05:30
ress := [ ] SeriesIterator {
newChainedSeriesIterator (
itSeries { newListSeriesIterator ( tc . a ) } ,
itSeries { newListSeriesIterator ( tc . b ) } ,
itSeries { newListSeriesIterator ( tc . c ) } ,
) ,
newVerticalMergeSeriesIterator (
itSeries { newListSeriesIterator ( tc . a ) } ,
itSeries { newListSeriesIterator ( tc . b ) } ,
itSeries { newListSeriesIterator ( tc . c ) } ,
) ,
}
2017-04-09 19:30:25 +05:30
2019-02-14 18:59:41 +05:30
for _ , res := range ress {
exp := newListSeriesIterator ( tc . exp )
2017-04-09 19:30:25 +05:30
2019-02-14 18:59:41 +05:30
testutil . Equals ( t , tc . success , res . Seek ( tc . seek ) )
2017-04-09 19:30:25 +05:30
2019-02-14 18:59:41 +05:30
if tc . success {
// Init the list and then proceed to check.
remaining := exp . Next ( )
testutil . Assert ( t , remaining == true , "" )
2017-04-09 19:30:25 +05:30
2019-02-14 18:59:41 +05:30
for remaining {
sExp , eExp := exp . At ( )
sRes , eRes := res . At ( )
testutil . Equals ( t , eExp , eRes )
testutil . Equals ( t , sExp , sRes )
2017-04-09 19:30:25 +05:30
2019-02-14 18:59:41 +05:30
remaining = exp . Next ( )
testutil . Equals ( t , remaining , res . Next ( ) )
}
2017-04-09 19:30:25 +05:30
}
}
}
} )
} )
}
2017-05-01 14:33:56 +05:30
2017-06-13 13:21:22 +05:30
// Regression for: https://github.com/prometheus/tsdb/pull/97
2017-06-30 15:06:27 +02:00
func TestChunkSeriesIterator_DoubleSeek ( t * testing . T ) {
2017-11-30 15:34:49 +01:00
chkMetas := [ ] chunks . Meta {
2019-01-28 14:24:49 +03:00
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { } ) ,
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { sample { 1 , 1 } , sample { 2 , 2 } , sample { 3 , 3 } } ) ,
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { sample { 4 , 4 } , sample { 5 , 5 } } ) ,
2017-06-13 13:21:22 +05:30
}
res := newChunkSeriesIterator ( chkMetas , nil , 2 , 8 )
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , res . Seek ( 1 ) == true , "" )
testutil . Assert ( t , res . Seek ( 2 ) == true , "" )
2017-06-13 13:21:22 +05:30
ts , v := res . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , int64 ( 2 ) , ts )
testutil . Equals ( t , float64 ( 2 ) , v )
2017-06-13 13:21:22 +05:30
}
2017-06-30 15:06:27 +02:00
// Regression when seeked chunks were still found via binary search and we always
// skipped to the end when seeking a value in the current chunk.
func TestChunkSeriesIterator_SeekInCurrentChunk ( t * testing . T ) {
2017-11-30 15:34:49 +01:00
metas := [ ] chunks . Meta {
2019-01-28 14:24:49 +03:00
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { } ) ,
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { sample { 1 , 2 } , sample { 3 , 4 } , sample { 5 , 6 } , sample { 7 , 8 } } ) ,
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { } ) ,
2017-06-30 15:06:27 +02:00
}
it := newChunkSeriesIterator ( metas , nil , 1 , 7 )
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , it . Next ( ) == true , "" )
2017-06-30 15:06:27 +02:00
ts , v := it . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , int64 ( 1 ) , ts )
testutil . Equals ( t , float64 ( 2 ) , v )
2017-06-30 15:06:27 +02:00
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , it . Seek ( 4 ) == true , "" )
2017-06-30 15:06:27 +02:00
ts , v = it . At ( )
2017-12-06 17:06:14 -08:00
testutil . Equals ( t , int64 ( 5 ) , ts )
testutil . Equals ( t , float64 ( 6 ) , v )
2017-06-30 15:06:27 +02:00
}
2017-08-29 00:39:17 +02:00
// Regression when calling Next() with a time bounded to fit within two samples.
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
func TestChunkSeriesIterator_NextWithMinTime ( t * testing . T ) {
2017-11-30 15:34:49 +01:00
metas := [ ] chunks . Meta {
2019-01-28 14:24:49 +03:00
tsdbutil . ChunkFromSamples ( [ ] tsdbutil . Sample { sample { 1 , 6 } , sample { 5 , 6 } , sample { 7 , 8 } } ) ,
2017-08-29 00:39:17 +02:00
}
it := newChunkSeriesIterator ( metas , nil , 2 , 4 )
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , it . Next ( ) == false , "" )
2017-08-29 00:39:17 +02:00
}
2017-05-01 14:33:56 +05:30
func TestPopulatedCSReturnsValidChunkSlice ( t * testing . T ) {
lbls := [ ] labels . Labels { labels . New ( labels . Label { "a" , "b" } ) }
2017-11-30 15:34:49 +01:00
chunkMetas := [ ] [ ] chunks . Meta {
2017-05-01 14:33:56 +05:30
{
{ MinTime : 1 , MaxTime : 2 , Ref : 1 } ,
{ MinTime : 3 , MaxTime : 4 , Ref : 2 } ,
{ MinTime : 10 , MaxTime : 12 , Ref : 3 } ,
} ,
}
cr := mockChunkReader (
2017-11-30 15:34:49 +01:00
map [ uint64 ] chunkenc . Chunk {
1 : chunkenc . NewXORChunk ( ) ,
2 : chunkenc . NewXORChunk ( ) ,
3 : chunkenc . NewXORChunk ( ) ,
2017-05-01 14:33:56 +05:30
} ,
)
m := & mockChunkSeriesSet { l : lbls , cm : chunkMetas , i : - 1 }
p := & populatedChunkSeries {
set : m ,
chunks : cr ,
2017-05-01 15:01:17 +05:30
mint : 0 ,
maxt : 0 ,
2017-05-01 14:33:56 +05:30
}
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , p . Next ( ) == false , "" )
2017-05-01 15:01:17 +05:30
p . mint = 6
p . maxt = 9
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , p . Next ( ) == false , "" )
2017-05-01 15:01:17 +05:30
2017-05-03 22:45:28 +05:30
// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
2017-11-30 15:34:49 +01:00
chunkMetas = [ ] [ ] chunks . Meta {
2017-05-03 22:45:28 +05:30
{
{ MinTime : 1 , MaxTime : 2 , Ref : 1 } ,
} ,
}
m = & mockChunkSeriesSet { l : lbls , cm : chunkMetas , i : - 1 }
p = & populatedChunkSeries {
set : m ,
chunks : cr ,
mint : 10 ,
maxt : 15 ,
}
2017-12-06 17:06:14 -08:00
testutil . Assert ( t , p . Next ( ) == false , "" )
2017-05-01 14:33:56 +05:30
}
type mockChunkSeriesSet struct {
l [ ] labels . Labels
2017-11-30 15:34:49 +01:00
cm [ ] [ ] chunks . Meta
2017-05-01 14:33:56 +05:30
i int
}
func ( m * mockChunkSeriesSet ) Next ( ) bool {
if len ( m . l ) != len ( m . cm ) {
return false
}
m . i ++
return m . i < len ( m . l )
}
2017-11-30 15:34:49 +01:00
func ( m * mockChunkSeriesSet ) At ( ) ( labels . Labels , [ ] chunks . Meta , Intervals ) {
2017-05-22 16:42:36 +05:30
return m . l [ m . i ] , m . cm [ m . i ] , nil
2017-05-01 14:33:56 +05:30
}
func ( m * mockChunkSeriesSet ) Err ( ) error {
return nil
}
2017-07-05 16:19:28 +02:00
// Test the cost of merging series sets for different number of merged sets and their size.
// The subset are all equivalent so this does not capture merging of partial or non-overlapping sets well.
func BenchmarkMergedSeriesSet(b *testing.B) {
	// sel recursively merges the given sets pairwise, building a balanced
	// tree of mergedSeriesSet nodes. Declared with var first so the
	// closure can refer to itself.
	var sel func(sets []SeriesSet) SeriesSet
	sel = func(sets []SeriesSet) SeriesSet {
		if len(sets) == 0 {
			return EmptySeriesSet()
		}
		if len(sets) == 1 {
			return sets[0]
		}
		l := len(sets) / 2
		return newMergedSeriesSet(sel(sets[:l]), sel(sets[l:]))
	}

	// k: series per set; j: number of sets ("blocks") merged together.
	for _, k := range []int{
		100,
		1000,
		10000,
		20000,
	} {
		for _, j := range []int{1, 2, 4, 8, 16, 32} {
			b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
				lbls, err := labels.ReadLabels(filepath.Join("testdata", "20kseries.json"), k)
				testutil.Ok(b, err)

				sort.Sort(labels.Slice(lbls))

				// Every set receives the identical series list, so the
				// merge must deduplicate down to exactly len(lbls) series.
				in := make([][]Series, j)

				for _, l := range lbls {
					l2 := l // per-iteration copy captured by the closure below
					for j := range in {
						in[j] = append(in[j], &mockSeries{labels: func() labels.Labels { return l2 }})
					}
				}

				b.ResetTimer()

				for i := 0; i < b.N; i++ {
					var sets []SeriesSet
					for _, s := range in {
						sets = append(sets, newMockSeriesSet(s))
					}
					ms := sel(sets)

					i := 0
					for ms.Next() {
						i++
					}
					testutil.Ok(b, ms.Err())
					testutil.Equals(b, len(lbls), i)
				}
			})
		}
	}
}
2017-11-30 15:34:49 +01:00
// mockChunkReader is a ChunkReader backed by an in-memory map from chunk
// reference to chunk.
type mockChunkReader map[uint64]chunkenc.Chunk

// Chunk returns the chunk stored under the given reference, or an error if
// no such chunk exists.
func (cr mockChunkReader) Chunk(id uint64) (chunkenc.Chunk, error) {
	chk, ok := cr[id]
	if !ok {
		// Error strings are lowercase per Go convention.
		return nil, errors.New("chunk with ref not found")
	}
	return chk, nil
}

// Close implements ChunkReader; there is nothing to release.
func (cr mockChunkReader) Close() error {
	return nil
}
// TestDeletedIterator checks that deletedIterator skips exactly the samples
// falling into the given deletion intervals, for a variety of interval
// layouts (adjacent, overlapping, out of bounds).
func TestDeletedIterator(t *testing.T) {
	chk := chunkenc.NewXORChunk()
	app, err := chk.Appender()
	testutil.Ok(t, err)
	// Insert random stuff from (0, 1000).
	act := make([]sample, 1000)
	for i := 0; i < 1000; i++ {
		act[i].t = int64(i)
		act[i].v = rand.Float64()
		app.Append(act[i].t, act[i].v)
	}

	cases := []struct {
		r Intervals
	}{
		{r: Intervals{{1, 20}}},
		{r: Intervals{{1, 10}, {12, 20}, {21, 23}, {25, 30}}},
		{r: Intervals{{1, 10}, {12, 20}, {20, 30}}},
		{r: Intervals{{1, 10}, {12, 23}, {25, 30}}},
		{r: Intervals{{1, 23}, {12, 20}, {25, 30}}},
		{r: Intervals{{1, 23}, {12, 20}, {25, 3000}}},
		{r: Intervals{{0, 2000}}},
		{r: Intervals{{500, 2000}}},
		{r: Intervals{{0, 200}}},
		{r: Intervals{{1000, 20000}}},
	}

	for _, c := range cases {
		// i tracks the expected (non-deleted) sample index.
		i := int64(-1)
		it := &deletedIterator{it: chk.Iterator(nil), intervals: c.r[:]}

		ranges := c.r[:]
		for it.Next() {
			i++
			// Skip the expected index past any deletion interval it falls
			// into; intervals are consumed front-to-back as we pass them.
			for _, tr := range ranges {
				if tr.inBounds(i) {
					i = tr.Maxt + 1
					ranges = ranges[1:]
				}
			}

			testutil.Assert(t, i < 1000, "")

			ts, v := it.At()
			testutil.Equals(t, act[i].t, ts)
			testutil.Equals(t, act[i].v, v)
		}
		// There has been an extra call to Next().
		i++
		for _, tr := range ranges {
			if tr.inBounds(i) {
				i = tr.Maxt + 1
				ranges = ranges[1:]
			}
		}

		// The iterator must be exhausted exactly when the expected index
		// runs off the end of the appended samples.
		testutil.Assert(t, i >= 1000, "")
		testutil.Ok(t, it.Err())
	}
}
// series holds a label set together with the metadata of its chunks, as
// stored by mockIndex. Chunk data itself is not kept (see AddSeries).
type series struct {
	l      labels.Labels
	chunks []chunks.Meta
}
// mockIndex is an in-memory index reader/writer used in tests.
type mockIndex struct {
	series     map[uint64]series         // series keyed by reference
	labelIndex map[string][]string       // label name -> sorted values
	postings   map[labels.Label][]uint64 // label pair -> series references
	symbols    map[string]struct{}       // interned label names and values
}
// newMockIndex returns an empty mockIndex with all maps initialized.
func newMockIndex() mockIndex {
	return mockIndex{
		series:     make(map[uint64]series),
		labelIndex: make(map[string][]string),
		postings:   make(map[labels.Label][]uint64),
		symbols:    make(map[string]struct{}),
	}
}
// Symbols returns the set of interned label names and values.
func (m mockIndex) Symbols() (map[string]struct{}, error) {
	return m.symbols, nil
}
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
func ( m * mockIndex ) AddSeries ( ref uint64 , l labels . Labels , chunks ... chunks . Meta ) error {
2017-11-30 15:34:49 +01:00
if _ , ok := m . series [ ref ] ; ok {
return errors . Errorf ( "series with reference %d already added" , ref )
}
for _ , lbl := range l {
m . symbols [ lbl . Name ] = struct { } { }
m . symbols [ lbl . Value ] = struct { } { }
}
s := series { l : l }
// Actual chunk data is not stored in the index.
for _ , c := range chunks {
c . Chunk = nil
s . chunks = append ( s . chunks , c )
}
m . series [ ref ] = s
return nil
}
// WriteLabelIndex records the sorted set of values for a single label name.
// Only single-name (non-composite) indexes are supported.
func (m mockIndex) WriteLabelIndex(names []string, values []string) error {
	// TODO support composite indexes
	if len(names) != 1 {
		return errors.New("composite indexes not supported yet")
	}

	// Sort a copy so the caller's slice is not mutated as a side effect.
	sorted := append([]string(nil), values...)
	sort.Strings(sorted)
	m.labelIndex[names[0]] = sorted
	return nil
}
// WritePostings stores the expanded postings list for the given label pair.
// It fails if postings for that pair were already written.
func (m mockIndex) WritePostings(name, value string, it index.Postings) error {
	l := labels.Label{Name: name, Value: value}
	if _, ok := m.postings[l]; ok {
		return errors.Errorf("postings for %s already added", l)
	}
	// Expand eagerly; the mock keeps postings fully materialized.
	ep, err := index.ExpandPostings(it)
	if err != nil {
		return err
	}
	m.postings[l] = ep
	return nil
}
// Close implements the index interfaces; there is nothing to release.
func (m mockIndex) Close() error {
	return nil
}
// LabelValues returns the stored values for a single label name as
// 1-tuples. Composite (multi-name) lookups are not supported.
func (m mockIndex) LabelValues(names ...string) (index.StringTuples, error) {
	// TODO support composite indexes
	if len(names) != 1 {
		return nil, errors.New("composite indexes not supported yet")
	}

	return index.NewStringTuples(m.labelIndex[names[0]], 1)
}
// Postings returns the stored postings for the label pair; unknown pairs
// yield an empty (nil-backed) list rather than an error.
func (m mockIndex) Postings(name, value string) (index.Postings, error) {
	l := labels.Label{Name: name, Value: value}
	return index.NewListPostings(m.postings[l]), nil
}
// SortedPostings expands p and re-sorts the references by the label sets of
// the series they point to, matching the on-disk index contract.
func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
	ep, err := index.ExpandPostings(p)
	if err != nil {
		return index.ErrPostings(errors.Wrap(err, "expand postings"))
	}

	sort.Slice(ep, func(i, j int) bool {
		return labels.Compare(m.series[ep[i]].l, m.series[ep[j]].l) < 0
	})
	return index.NewListPostings(ep)
}
// Series copies the label set and chunk metas of the referenced series into
// lset and chks (reusing their backing storage). Returns ErrNotFound for
// unknown references.
func (m mockIndex) Series(ref uint64, lset *labels.Labels, chks *[]chunks.Meta) error {
	s, ok := m.series[ref]
	if !ok {
		return ErrNotFound
	}
	*lset = append((*lset)[:0], s.l...)
	*chks = append((*chks)[:0], s.chunks...)

	return nil
}
// LabelIndices lists every indexed label name as a single-element tuple.
func (m mockIndex) LabelIndices() ([][]string, error) {
	indices := make([][]string, 0, len(m.labelIndex))
	for name := range m.labelIndex {
		indices = append(indices, []string{name})
	}
	return indices, nil
}
2018-11-07 21:22:41 +05:30
// LabelNames returns all indexed label names in sorted order.
func (m mockIndex) LabelNames() ([]string, error) {
	names := make([]string, 0, len(m.labelIndex))
	for n := range m.labelIndex {
		names = append(names, n)
	}
	sort.Strings(names)
	return names, nil
}
2019-01-28 14:24:49 +03:00
// mockSeries is a Series whose labels and iterator are supplied as closures.
type mockSeries struct {
	labels   func() labels.Labels
	iterator func() SeriesIterator
}

// newSeries builds a Series from a label map and a fixed list of samples.
func newSeries(l map[string]string, s []tsdbutil.Sample) Series {
	return &mockSeries{
		labels:   func() labels.Labels { return labels.FromMap(l) },
		iterator: func() SeriesIterator { return newListSeriesIterator(s) },
	}
}

func (m *mockSeries) Labels() labels.Labels    { return m.labels() }
func (m *mockSeries) Iterator() SeriesIterator { return m.iterator() }
// listSeriesIterator iterates over an in-memory list of samples.
type listSeriesIterator struct {
	list []tsdbutil.Sample
	idx  int
}

// newListSeriesIterator returns an iterator positioned before the first sample.
func newListSeriesIterator(list []tsdbutil.Sample) *listSeriesIterator {
	return &listSeriesIterator{list: list, idx: -1}
}

// At returns the timestamp/value pair at the current position.
func (it *listSeriesIterator) At() (int64, float64) {
	s := it.list[it.idx]
	return s.T(), s.V()
}

// Next advances the iterator and reports whether a sample is available.
func (it *listSeriesIterator) Next() bool {
	it.idx++
	return it.idx < len(it.list)
}

// Seek advances (never rewinds) to the first sample with timestamp >= t and
// reports whether such a sample exists.
func (it *listSeriesIterator) Seek(t int64) bool {
	if it.idx == -1 {
		it.idx = 0
	}
	// Do binary search between current position and end. sort.Search
	// returns an index relative to the searched sub-slice, so it must be
	// offset by the starting position; previously the offset was dropped,
	// making Seek land on a too-small index when called mid-iteration.
	base := it.idx
	it.idx = base + sort.Search(len(it.list)-base, func(i int) bool {
		return it.list[i+base].T() >= t
	})

	return it.idx < len(it.list)
}

// Err always returns nil; iterating a static list cannot fail.
func (it *listSeriesIterator) Err() error {
	return nil
}
2019-02-14 18:59:41 +05:30
// BenchmarkQueryIterator measures full iteration over all series across
// multiple blocks, with varying degrees of time-range overlap between
// consecutive blocks.
func BenchmarkQueryIterator(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 20000,
			overlapPercentages:          []int{0, 10, 30},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				testutil.Ok(b, err)
				defer func() {
					testutil.Ok(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					// Shift each block back by the accumulated overlap so
					// consecutive blocks overlap by the requested percentage.
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					// The first block generates the series; later blocks reuse
					// the same label sets so all blocks hold the same series.
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					testutil.Ok(b, err)
					blocks = append(blocks, block)
					// NOTE(review): defer in a loop — all Closes run at b.Run
					// exit, which is intentional here (after the benchmark body).
					defer block.Close()
				}

				que := &querier{
					blocks: make([]Querier, 0, len(blocks)),
				}
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					testutil.Ok(b, err)
					que.blocks = append(que.blocks, q)
				}

				// Overlapping blocks require the vertical querier to merge
				// duplicate series correctly.
				var sq Querier = que
				if overlapPercentage > 0 {
					sq = &verticalQuerier{
						querier: *que,
					}
				}
				defer sq.Close()

				benchQuery(b, c.numSeries, sq, labels.Selector{labels.NewMustRegexpMatcher("__name__", ".*")})
			})
		}
	}
}
// BenchmarkQuerySeek measures Seek() over every timestamp of every series
// across multiple blocks, with varying degrees of block overlap.
func BenchmarkQuerySeek(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   100,
			numSamplesPerSeriesPerBlock: 2000,
			overlapPercentages:          []int{0, 10, 30, 50},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				testutil.Ok(b, err)
				defer func() {
					testutil.Ok(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					// Shift each block back by the accumulated overlap so
					// consecutive blocks overlap by the requested percentage.
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					// The first block generates the series; later blocks reuse
					// the same label sets so all blocks hold the same series.
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					testutil.Ok(b, err)
					blocks = append(blocks, block)
					defer block.Close()
				}

				que := &querier{
					blocks: make([]Querier, 0, len(blocks)),
				}
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					testutil.Ok(b, err)
					que.blocks = append(que.blocks, q)
				}

				// Overlapping blocks require the vertical querier to merge
				// duplicate series correctly.
				var sq Querier = que
				if overlapPercentage > 0 {
					sq = &verticalQuerier{
						querier: *que,
					}
				}
				defer sq.Close()

				mint := blocks[0].meta.MinTime
				maxt := blocks[len(blocks)-1].meta.MaxTime

				b.ResetTimer()
				b.ReportAllocs()

				ss, err := sq.Select(labels.NewMustRegexpMatcher("__name__", ".*"))
				// Check the Select error before iterating the result: using a
				// nil series set would panic and mask the real failure.
				testutil.Ok(b, err)
				for ss.Next() {
					it := ss.At().Iterator()
					for t := mint; t <= maxt; t++ {
						it.Seek(t)
					}
					testutil.Ok(b, it.Err())
				}
				testutil.Ok(b, ss.Err())
			})
		}
	}
}
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
2019-05-27 19:24:46 +08:00
// Refer to https://github.com/prometheus/prometheus/issues/2651.
// BenchmarkSetMatcher compares the cost of Select with alternation-style
// regex patterns (set matchers) at various series counts and cardinalities.
func BenchmarkSetMatcher(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		cardinality                 int
		pattern                     string
	}{
		// The first three cases are to find out whether the set
		// matcher is always faster than regex matcher.
		{
			numBlocks:                   1,
			numSeries:                   1,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   15,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   15,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3)$",
		},
		// Big data sizes benchmarks.
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3)$",
		},
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		// Increase cardinality.
		{
			numBlocks:                   1,
			numSeries:                   100000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   500000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 500000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   10,
			numSeries:                   500000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 500000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   1000000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 1000000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
	}

	for _, c := range cases {
		dir, err := ioutil.TempDir("", "bench_postings_for_matchers")
		testutil.Ok(b, err)
		// NOTE(review): defers in this loop all run at function exit, so
		// temp dirs and blocks for every case stay alive until the end.
		defer func() {
			testutil.Ok(b, os.RemoveAll(dir))
		}()

		var (
			blocks          []*Block
			prefilledLabels []map[string]string
			generatedSeries []Series
		)
		for i := int64(0); i < int64(c.numBlocks); i++ {
			mint := i * int64(c.numSamplesPerSeriesPerBlock)
			maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
			// The first block generates the series; later blocks reuse the
			// same label sets so all blocks hold the same series.
			if len(prefilledLabels) == 0 {
				generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
				for _, s := range generatedSeries {
					prefilledLabels = append(prefilledLabels, s.Labels().Map())
				}
			} else {
				generatedSeries = populateSeries(prefilledLabels, mint, maxt)
			}
			block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
			testutil.Ok(b, err)
			blocks = append(blocks, block)
			defer block.Close()
		}

		que := &querier{
			blocks: make([]Querier, 0, len(blocks)),
		}
		for _, blk := range blocks {
			q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
			testutil.Ok(b, err)
			que.blocks = append(que.blocks, q)
		}
		defer que.Close()

		benchMsg := fmt.Sprintf("nSeries=%d,nBlocks=%d,cardinality=%d,pattern=\"%s\"", c.numSeries, c.numBlocks, c.cardinality, c.pattern)
		b.Run(benchMsg, func(b *testing.B) {
			b.ResetTimer()
			b.ReportAllocs()
			for n := 0; n < b.N; n++ {
				_, err := que.Select(labels.NewMustRegexpMatcher("test", c.pattern))
				testutil.Ok(b, err)
			}
		})
	}
}
// Refer to https://github.com/prometheus/prometheus/issues/2651.
// TestFindSetMatches checks extraction of literal alternatives from
// set-style regex patterns of the form ^(?:a|b|c)$.
func TestFindSetMatches(t *testing.T) {
	cases := []struct {
		pattern string
		exp     []string
	}{
		// Simple sets.
		{
			pattern: "^(?:foo|bar|baz)$",
			exp:     []string{"foo", "bar", "baz"},
		},
		// Simple sets containing escaped characters.
		{
			pattern: "^(?:fo\\.o|bar\\?|\\^baz)$",
			exp:     []string{"fo.o", "bar?", "^baz"},
		},
		// Simple sets containing special characters without escaping.
		{
			pattern: "^(?:fo.o|bar?|^baz)$",
			exp:     nil,
		},
		// Missing wrapper.
		{
			pattern: "foo|bar|baz",
			exp:     nil,
		},
	}

	for _, c := range cases {
		matches := findSetMatches(c.pattern)
		switch {
		case len(c.exp) == 0:
			// Patterns that are not plain sets must yield no matches.
			if len(matches) != 0 {
				t.Errorf("Evaluating %s, unexpected result %v", c.pattern, matches)
			}
		case len(matches) != len(c.exp):
			t.Errorf("Evaluating %s, length of result not equal to exp", c.pattern)
		default:
			for i := 0; i < len(c.exp); i++ {
				if c.exp[i] != matches[i] {
					t.Errorf("Evaluating %s, unexpected result %s", c.pattern, matches[i])
				}
			}
		}
	}
}
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
func TestPostingsForMatchers ( t * testing . T ) {
h , err := NewHead ( nil , nil , nil , 1000 )
testutil . Ok ( t , err )
defer func ( ) {
testutil . Ok ( t , h . Close ( ) )
} ( )
app := h . Appender ( )
app . Add ( labels . FromStrings ( "n" , "1" ) , 0 , 0 )
app . Add ( labels . FromStrings ( "n" , "1" , "i" , "a" ) , 0 , 0 )
app . Add ( labels . FromStrings ( "n" , "1" , "i" , "b" ) , 0 , 0 )
app . Add ( labels . FromStrings ( "n" , "2" ) , 0 , 0 )
2019-05-27 19:24:46 +08:00
app . Add ( labels . FromStrings ( "n" , "2.5" ) , 0 , 0 )
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
testutil . Ok ( t , app . Commit ( ) )
cases := [ ] struct {
matchers [ ] labels . Matcher
exp [ ] labels . Labels
} {
// Simple equals.
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewEqualMatcher ( "i" , "a" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewEqualMatcher ( "i" , "missing" ) } ,
exp : [ ] labels . Labels { } ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "missing" , "" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
labels . FromStrings ( "n" , "2" ) ,
2019-05-27 19:24:46 +08:00
labels . FromStrings ( "n" , "2.5" ) ,
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
} ,
} ,
// Not equals.
{
matchers : [ ] labels . Matcher { labels . Not ( labels . NewEqualMatcher ( "n" , "1" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "2" ) ,
2019-05-27 19:24:46 +08:00
labels . FromStrings ( "n" , "2.5" ) ,
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . Not ( labels . NewEqualMatcher ( "i" , "" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . Not ( labels . NewEqualMatcher ( "missing" , "" ) ) } ,
exp : [ ] labels . Labels { } ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewEqualMatcher ( "i" , "a" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewEqualMatcher ( "i" , "" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
// Regex.
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "n" , "^1$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewMustRegexpMatcher ( "i" , "^a$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewMustRegexpMatcher ( "i" , "^a?$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "i" , "^$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "2" ) ,
2019-05-27 19:24:46 +08:00
labels . FromStrings ( "n" , "2.5" ) ,
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewMustRegexpMatcher ( "i" , "^$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewMustRegexpMatcher ( "i" , "^.*$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . NewMustRegexpMatcher ( "i" , "^.+$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
// Not regex.
{
matchers : [ ] labels . Matcher { labels . Not ( labels . NewMustRegexpMatcher ( "n" , "^1$" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "2" ) ,
2019-05-27 19:24:46 +08:00
labels . FromStrings ( "n" , "2.5" ) ,
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewMustRegexpMatcher ( "i" , "^a$" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewMustRegexpMatcher ( "i" , "^a?$" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewMustRegexpMatcher ( "i" , "^$" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewMustRegexpMatcher ( "i" , "^.*$" ) ) } ,
exp : [ ] labels . Labels { } ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewMustRegexpMatcher ( "i" , "^.+$" ) ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
} ,
} ,
// Combinations.
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewEqualMatcher ( "i" , "" ) ) , labels . NewEqualMatcher ( "i" , "a" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewEqualMatcher ( "n" , "1" ) , labels . Not ( labels . NewEqualMatcher ( "i" , "b" ) ) , labels . NewMustRegexpMatcher ( "i" , "^(b|a).*$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
} ,
} ,
2019-05-27 19:24:46 +08:00
// Set optimization for Regex.
// Refer to https://github.com/prometheus/prometheus/issues/2651.
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "n" , "^(?:1|2)$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
labels . FromStrings ( "n" , "2" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "i" , "^(?:a|b)$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" , "i" , "a" ) ,
labels . FromStrings ( "n" , "1" , "i" , "b" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "n" , "^(?:x1|2)$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "2" ) ,
} ,
} ,
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "n" , "^(?:2|2\\.5)$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "2" ) ,
labels . FromStrings ( "n" , "2.5" ) ,
} ,
} ,
// Empty value.
{
matchers : [ ] labels . Matcher { labels . NewMustRegexpMatcher ( "i" , "^(?:c||d)$" ) } ,
exp : [ ] labels . Labels {
labels . FromStrings ( "n" , "1" ) ,
labels . FromStrings ( "n" , "2" ) ,
labels . FromStrings ( "n" , "2.5" ) ,
} ,
} ,
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
}
ir , err := h . Index ( )
testutil . Ok ( t , err )
for _ , c := range cases {
exp := map [ string ] struct { } { }
for _ , l := range c . exp {
exp [ l . String ( ) ] = struct { } { }
}
p , err := PostingsForMatchers ( ir , c . matchers ... )
testutil . Ok ( t , err )
for p . Next ( ) {
lbls := labels . Labels { }
2019-04-25 13:07:04 +03:00
testutil . Ok ( t , ir . Series ( p . At ( ) , & lbls , & [ ] chunks . Meta { } ) )
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 11:59:45 +01:00
if _ , ok := exp [ lbls . String ( ) ] ; ! ok {
t . Errorf ( "Evaluating %v, unexpected result %s" , c . matchers , lbls . String ( ) )
} else {
delete ( exp , lbls . String ( ) )
}
}
testutil . Ok ( t , p . Err ( ) )
if len ( exp ) != 0 {
t . Errorf ( "Evaluating %v, missing results %+v" , c . matchers , exp )
}
}
}
2019-04-30 10:17:07 +03:00
// TestClose ensures that calling Close more than once doesn't block and doesn't panic.
func TestClose ( t * testing . T ) {
dir , err := ioutil . TempDir ( "" , "test_storage" )
if err != nil {
t . Fatalf ( "Opening test dir failed: %s" , err )
}
defer func ( ) {
testutil . Ok ( t , os . RemoveAll ( dir ) )
} ( )
createBlock ( t , dir , genSeries ( 1 , 1 , 0 , 10 ) )
createBlock ( t , dir , genSeries ( 1 , 1 , 10 , 20 ) )
db , err := Open ( dir , nil , nil , DefaultOptions )
if err != nil {
t . Fatalf ( "Opening test storage failed: %s" , err )
}
defer func ( ) {
testutil . Ok ( t , db . Close ( ) )
} ( )
q , err := db . Querier ( 0 , 20 )
testutil . Ok ( t , err )
testutil . Ok ( t , q . Close ( ) )
testutil . NotOk ( t , q . Close ( ) )
}
2019-06-07 15:41:44 +02:00
func BenchmarkQueries ( b * testing . B ) {
cases := map [ string ] labels . Selector {
"Eq Matcher: Expansion - 1" : labels . Selector {
labels . NewEqualMatcher ( "la" , "va" ) ,
} ,
"Eq Matcher: Expansion - 2" : labels . Selector {
labels . NewEqualMatcher ( "la" , "va" ) ,
labels . NewEqualMatcher ( "lb" , "vb" ) ,
} ,
"Eq Matcher: Expansion - 3" : labels . Selector {
labels . NewEqualMatcher ( "la" , "va" ) ,
labels . NewEqualMatcher ( "lb" , "vb" ) ,
labels . NewEqualMatcher ( "lc" , "vc" ) ,
} ,
"Regex Matcher: Expansion - 1" : labels . Selector {
labels . NewMustRegexpMatcher ( "la" , ".*va" ) ,
} ,
"Regex Matcher: Expansion - 2" : labels . Selector {
labels . NewMustRegexpMatcher ( "la" , ".*va" ) ,
labels . NewMustRegexpMatcher ( "lb" , ".*vb" ) ,
} ,
"Regex Matcher: Expansion - 3" : labels . Selector {
labels . NewMustRegexpMatcher ( "la" , ".*va" ) ,
labels . NewMustRegexpMatcher ( "lb" , ".*vb" ) ,
labels . NewMustRegexpMatcher ( "lc" , ".*vc" ) ,
} ,
}
queryTypes := make ( map [ string ] Querier )
defer func ( ) {
for _ , q := range queryTypes {
// Can't run a check for error here as some of these will fail as
// queryTypes is using the same slice for the different block queriers
// and would have been closed in the previous iterration.
q . Close ( )
}
} ( )
for title , selectors := range cases {
for _ , nSeries := range [ ] int { 10 } {
for _ , nSamples := range [ ] int64 { 1000 , 10000 , 100000 } {
dir , err := ioutil . TempDir ( "" , "test_persisted_query" )
testutil . Ok ( b , err )
defer func ( ) {
testutil . Ok ( b , os . RemoveAll ( dir ) )
} ( )
series := genSeries ( nSeries , 5 , 1 , int64 ( nSamples ) )
// Add some common labels to make the matchers select these series.
{
var commonLbls labels . Labels
for _ , selector := range selectors {
switch sel := selector . ( type ) {
case * labels . EqualMatcher :
commonLbls = append ( commonLbls , labels . Label { Name : sel . Name ( ) , Value : sel . Value ( ) } )
case * labels . RegexpMatcher :
commonLbls = append ( commonLbls , labels . Label { Name : sel . Name ( ) , Value : sel . Value ( ) } )
}
}
for i := range commonLbls {
s := series [ i ] . ( * mockSeries )
allLabels := append ( commonLbls , s . Labels ( ) ... )
s = & mockSeries {
labels : func ( ) labels . Labels { return allLabels } ,
iterator : s . iterator ,
}
series [ i ] = s
}
}
qs := [ ] Querier { }
for x := 0 ; x <= 10 ; x ++ {
block , err := OpenBlock ( nil , createBlock ( b , dir , series ) , nil )
testutil . Ok ( b , err )
q , err := NewBlockQuerier ( block , 1 , int64 ( nSamples ) )
testutil . Ok ( b , err )
qs = append ( qs , q )
}
queryTypes [ "_1-Block" ] = & querier { blocks : qs [ : 1 ] }
queryTypes [ "_3-Blocks" ] = & querier { blocks : qs [ 0 : 3 ] }
queryTypes [ "_10-Blocks" ] = & querier { blocks : qs }
head := createHead ( b , series )
qHead , err := NewBlockQuerier ( head , 1 , int64 ( nSamples ) )
testutil . Ok ( b , err )
queryTypes [ "_Head" ] = qHead
for qtype , querier := range queryTypes {
b . Run ( title + qtype + "_nSeries:" + strconv . Itoa ( nSeries ) + "_nSamples:" + strconv . Itoa ( int ( nSamples ) ) , func ( b * testing . B ) {
expExpansions , err := strconv . Atoi ( string ( title [ len ( title ) - 1 ] ) )
testutil . Ok ( b , err )
benchQuery ( b , expExpansions , querier , selectors )
} )
}
}
}
}
}
// benchQuery runs Select with the given selectors b.N times, fully draining
// every returned series, and verifies each run expands exactly expExpansions
// series.
func benchQuery(b *testing.B, expExpansions int, q Querier, selectors labels.Selector) {
	b.ResetTimer()
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		set, err := q.Select(selectors...)
		testutil.Ok(b, err)

		seen := 0
		for set.Next() {
			series := set.At()
			series.Labels()
			// Drain all samples so iteration cost is included.
			iter := series.Iterator()
			for iter.Next() {
			}
			seen++
		}
		testutil.Equals(b, expExpansions, seen)
		testutil.Ok(b, set.Err())
	}
}