satisfying the compiler
Signed-off-by: Owen Diehl <[email protected]>
owen-d committed Jan 25, 2024
1 parent e313ca0 commit 7f3a7bd
Showing 5 changed files with 23 additions and 20 deletions.
4 changes: 3 additions & 1 deletion pkg/bloomcompactor/bloomcompactor.go
@@ -538,7 +538,9 @@ func (c *Compactor) runCompact(ctx context.Context, logger log.Logger, job Job,
return err
}

- resultingBlock, err = compactNewChunks(ctx, logger, job, bt, storeClient.chunk, builder, c.limits)
+ // NB(owen-d): this panics/etc, but the code is being refactored and will be removed. I've replaced `bt` with `nil`
+ // to pass compiler checks while keeping this code around as reference
+ resultingBlock, err = compactNewChunks(ctx, logger, job, nil, storeClient.chunk, builder, c.limits)
if err != nil {
return level.Error(logger).Log("msg", "failed compacting new chunks", "err", err)
}
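Aside: a minimal sketch (not Loki code) of the trade-off the NB(owen-d) comment describes. Passing nil for a dependency keeps the call site compiling during a refactor, but the code path will panic or error if it is ever exercised. The Tokenizer interface and compactSketch function below are invented for illustration only.

package main

import (
	"errors"
	"fmt"
)

// Tokenizer is a hypothetical stand-in for the `bt` dependency that the commit
// replaces with nil; it is not the interface used in Loki.
type Tokenizer interface {
	Populate(series string) error
}

// compactSketch mirrors the shape of the call site: the dependency stays in the
// signature, so the file compiles, but a nil argument only works as long as the
// tokenizer is never actually used.
func compactSketch(bt Tokenizer, series string) error {
	if bt == nil {
		// Without this guard, bt.Populate(series) would panic on a nil interface.
		return errors.New("tokenizer not wired up: code path kept for reference only")
	}
	return bt.Populate(series)
}

func main() {
	// Passing nil satisfies the compiler while the refactor is in flight.
	fmt.Println(compactSketch(nil, "example-series"))
}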
13 changes: 7 additions & 6 deletions pkg/bloomcompactor/mergecompactor.go
@@ -2,7 +2,6 @@ package bloomcompactor

import (
"context"
- "fmt"

"github.com/grafana/dskit/concurrency"

@@ -96,11 +95,13 @@ func createPopulateFunc(ctx context.Context, job Job, storeClient storeClient, b
}
}

- batchesIterator, err := newChunkBatchesIterator(ctx, storeClient.chunk, chunkRefs, limits.BloomCompactorChunksBatchSize(job.tenantID))
- if err != nil {
- return fmt.Errorf("error creating chunks batches iterator: %w", err)
- }
- err = bt.PopulateSeriesWithBloom(&bloomForChks, batchesIterator)
+ // batchesIterator, err := newChunkBatchesIterator(ctx, storeClient.chunk, chunkRefs, limits.BloomCompactorChunksBatchSize(job.tenantID))
+ // if err != nil {
+ // return fmt.Errorf("error creating chunks batches iterator: %w", err)
+ // }
+ // NB(owen-d): this panics/etc, but the code is being refactored and will be removed.
+ // I've replaced `batchesIterator` with `nil` to pass compiler checks while keeping this code around as reference
+ err := bt.Populate(&bloomForChks, nil)
if err != nil {
return err
}
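For reference, the commented-out newChunkBatchesIterator call fetched chunks in fixed-size batches sized per tenant. A minimal, self-contained sketch of that batching idea (the helper name and shape are invented, not Loki's API):

package main

import "fmt"

// batches splits items into consecutive groups of at most size elements,
// roughly the access pattern a chunk-batches iterator provides.
func batches[T any](items []T, size int) [][]T {
	if size <= 0 {
		size = 1
	}
	var out [][]T
	for start := 0; start < len(items); start += size {
		end := start + size
		if end > len(items) {
			end = len(items)
		}
		out = append(out, items[start:end])
	}
	return out
}

func main() {
	chunkRefs := []string{"c1", "c2", "c3", "c4", "c5"}
	// The batch size plays the role of a per-tenant limit such as BloomCompactorChunksBatchSize.
	for _, b := range batches(chunkRefs, 2) {
		fmt.Println(b) // [c1 c2], [c3 c4], [c5]
	}
}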
11 changes: 11 additions & 0 deletions pkg/bloomcompactor/v2spec_test.go
@@ -52,10 +52,21 @@ func blocksFromSchemaWithRange(t *testing.T, n int, options v1.BlockOptions, fro
return res, data
}

+ // doesn't actually load any chunks
+ type dummyChunkLoader struct{}
+
+ func (_ dummyChunkLoader) Load(_ context.Context, series *v1.Series) (*chunkItersByFingerprint, error) {
+ return &chunkItersByFingerprint{
+ fp: series.Fingerprint,
+ itr: v1.NewEmptyIter[v1.ChunkRefWithIter](),
+ }, nil
+ }
+
func dummyBloomGen(opts v1.BlockOptions, store v1.Iterator[*v1.Series], blocks []*v1.Block) *SimpleBloomGenerator {
return NewSimpleBloomGenerator(
opts,
store,
+ dummyChunkLoader{},
blocks,
func() (v1.BlockWriter, v1.BlockReader) {
indexBuf := bytes.NewBuffer(nil)
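The dummyChunkLoader above is a test double: it satisfies the loader dependency of SimpleBloomGenerator without touching chunk storage. A generic sketch of the same pattern (interface and names invented for illustration, not Loki's ChunkLoader):

package main

import (
	"context"
	"fmt"
)

// loader is a hypothetical stand-in for the chunk-loading dependency.
type loader interface {
	Load(ctx context.Context, series string) ([]string, error)
}

// noopLoader mirrors dummyChunkLoader: it satisfies the interface but returns an
// empty result, so tests exercise control flow without real chunk I/O.
type noopLoader struct{}

func (noopLoader) Load(_ context.Context, _ string) ([]string, error) {
	return nil, nil
}

// countChunks stands in for code under test that only needs *a* loader.
func countChunks(ctx context.Context, l loader, series []string) (int, error) {
	total := 0
	for _, s := range series {
		chunks, err := l.Load(ctx, s)
		if err != nil {
			return total, err
		}
		total += len(chunks)
	}
	return total, nil
}

func main() {
	n, err := countChunks(context.Background(), noopLoader{}, []string{"a", "b"})
	fmt.Println(n, err) // 0 <nil>
}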
4 changes: 2 additions & 2 deletions pkg/storage/bloom/v1/util.go
@@ -201,8 +201,8 @@ func (it *EmptyIter[T]) At() T {
// noop
func (it *EmptyIter[T]) Reset() {}

- func NewEmptyIter[T any](zero T) *EmptyIter[T] {
- return &EmptyIter[T]{zero: zero}
+ func NewEmptyIter[T any]() *EmptyIter[T] {
+ return &EmptyIter[T]{}
}

type CancellableIter[T any] struct {
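The NewEmptyIter change works because Go zero-initializes struct fields: an EmptyIter's zero field already holds the zero value of T, so the constructor no longer needs it passed in. A minimal sketch of that idea (names are illustrative, not the full v1 iterator interface):

package main

import "fmt"

// emptyIter never yields elements; its zero field is populated automatically by
// Go's zero-value initialization, which is why no `zero T` argument is needed.
type emptyIter[T any] struct {
	zero T
}

func (it *emptyIter[T]) Next() bool { return false } // never yields an element
func (it *emptyIter[T]) At() T      { return it.zero }

func newEmptyIter[T any]() *emptyIter[T] {
	return &emptyIter[T]{} // zero is the zero value of T by construction
}

func main() {
	it := newEmptyIter[int]()
	fmt.Println(it.Next(), it.At()) // false 0
}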
11 changes: 0 additions & 11 deletions pkg/storage/stores/shipper/indexshipper/tsdb/index.go
@@ -6,7 +6,6 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/model/labels"

- "github.com/grafana/loki/pkg/logproto"
"github.com/grafana/loki/pkg/storage/chunk"
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index"
)

@@ -23,16 +22,6 @@ type ChunkRef struct {
Checksum uint32
}

- func (r ChunkRef) LogProto() logproto.ChunkRef {
- return logproto.ChunkRef{
- UserID: r.User,
- Fingerprint: uint64(r.Fingerprint),
- From: r.Start,
- Through: r.End,
- Checksum: r.Checksum,
- }
- }
-
// Compares by (Start, End)
// Assumes User is equivalent
func (r ChunkRef) Less(x ChunkRef) bool {
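The remaining ChunkRef.Less orders by (Start, End) and assumes both refs belong to the same user. A small sketch of that lexicographic comparison on a simplified struct (not Loki's ChunkRef):

package main

import "fmt"

// ref is a simplified stand-in with only the fields the ordering uses.
type ref struct {
	Start, End int64
}

// less orders by Start first, then breaks ties on End, mirroring the
// (Start, End) ordering documented on ChunkRef.Less.
func less(a, b ref) bool {
	if a.Start != b.Start {
		return a.Start < b.Start
	}
	return a.End < b.End
}

func main() {
	fmt.Println(less(ref{Start: 1, End: 5}, ref{Start: 2, End: 3})) // true: earlier Start wins
	fmt.Println(less(ref{Start: 1, End: 5}, ref{Start: 1, End: 4})) // false: same Start, later End
}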
