Switch to bbolt
parent 360bed00f9
commit 77543e3aed

617 changed files with 68468 additions and 97867 deletions

4	vendor/github.com/blevesearch/bleve/search/collector/heap.go (generated, vendored)

@@ -25,9 +25,9 @@ type collectStoreHeap struct {
 	compare collectorCompare
 }
 
-func newStoreHeap(cap int, compare collectorCompare) *collectStoreHeap {
+func newStoreHeap(capacity int, compare collectorCompare) *collectStoreHeap {
 	rv := &collectStoreHeap{
-		heap:    make(search.DocumentMatchCollection, 0, cap),
+		heap:    make(search.DocumentMatchCollection, 0, capacity),
 		compare: compare,
 	}
 	heap.Init(rv)
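
The only change in heap.go (and the matching renames in list.go and slice.go below) is renaming the cap parameter to capacity, presumably so the predeclared builtin cap is not shadowed inside these constructors. A minimal sketch of the shadowing issue, with hypothetical names:

package example

// With a parameter named cap, the builtin cap() is shadowed inside the
// function body, so cap(buf) no longer compiles there.
func newBufShadowed(cap int) []int {
	buf := make([]int, 0, cap) // cap is just an int here, this still works
	// _ = cap(buf)            // compile error: cap is not a function in this scope
	return buf
}

// Renaming the parameter keeps the builtin usable.
func newBuf(capacity int) []int {
	buf := make([]int, 0, capacity)
	_ = cap(buf) // the builtin is available again
	return buf
}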

5	vendor/github.com/blevesearch/bleve/search/collector/list.go (generated, vendored)

@@ -25,7 +25,7 @@ type collectStoreList struct {
 	compare collectorCompare
 }
 
-func newStoreList(cap int, compare collectorCompare) *collectStoreList {
+func newStoreList(capacity int, compare collectorCompare) *collectStoreList {
 	rv := &collectStoreList{
 		results: list.New(),
 		compare: compare,
@@ -34,8 +34,7 @@ func newStoreList(cap int, compare collectorCompare) *collectStoreList {
 	return rv
 }
 
-func (c *collectStoreList) AddNotExceedingSize(doc *search.DocumentMatch,
-	size int) *search.DocumentMatch {
+func (c *collectStoreList) AddNotExceedingSize(doc *search.DocumentMatch, size int) *search.DocumentMatch {
 	c.add(doc)
 	if c.len() > size {
 		return c.removeLast()

4	vendor/github.com/blevesearch/bleve/search/collector/slice.go (generated, vendored)

@@ -21,9 +21,9 @@ type collectStoreSlice struct {
 	compare collectorCompare
 }
 
-func newStoreSlice(cap int, compare collectorCompare) *collectStoreSlice {
+func newStoreSlice(capacity int, compare collectorCompare) *collectStoreSlice {
 	rv := &collectStoreSlice{
-		slice:   make(search.DocumentMatchCollection, 0, cap),
+		slice:   make(search.DocumentMatchCollection, 0, capacity),
 		compare: compare,
 	}
 	return rv

144	vendor/github.com/blevesearch/bleve/search/collector/topn.go (generated, vendored)

@@ -17,6 +17,7 @@ package collector
 import (
 	"context"
 	"reflect"
+	"strconv"
 	"time"
 
 	"github.com/blevesearch/bleve/index"
@@ -69,6 +70,7 @@ type TopNCollector struct {
 	lowestMatchOutsideResults *search.DocumentMatch
 	updateFieldVisitor        index.DocumentFieldTermVisitor
 	dvReader                  index.DocValueReader
+	searchAfter               *search.DocumentMatch
 }
 
 // CheckDoneEvery controls how frequently we check the context deadline
@@ -78,6 +80,33 @@ const CheckDoneEvery = uint64(1024)
 // skipping over the first 'skip' hits
 // ordering hits by the provided sort order
 func NewTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector {
+	return newTopNCollector(size, skip, sort)
+}
+
+// NewTopNCollector builds a collector to find the top 'size' hits
+// skipping over the first 'skip' hits
+// ordering hits by the provided sort order
+func NewTopNCollectorAfter(size int, sort search.SortOrder, after []string) *TopNCollector {
+	rv := newTopNCollector(size, 0, sort)
+	rv.searchAfter = &search.DocumentMatch{
+		Sort: after,
+	}
+
+	for pos, ss := range sort {
+		if ss.RequiresDocID() {
+			rv.searchAfter.ID = after[pos]
+		}
+		if ss.RequiresScoring() {
+			if score, err := strconv.ParseFloat(after[pos], 64); err == nil {
+				rv.searchAfter.Score = score
+			}
+		}
+	}
+
+	return rv
+}
+
+func newTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector {
 	hc := &TopNCollector{size: size, skip: skip, sort: sort}
 
 	// pre-allocate space on the store to avoid reslicing
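
NewTopNCollectorAfter, added in the hunk above, enables "search after" style pagination at the collector level: instead of deep paging with size+skip, collection resumes strictly after a given sort key. A minimal sketch of how a caller might use it, assuming the Sort values of the previous page's last hit have been kept; the helper name nextPageCollector is hypothetical:

package example

import (
	"github.com/blevesearch/bleve/search"
	"github.com/blevesearch/bleve/search/collector"
)

// nextPageCollector resumes collection strictly after lastSortKey, the Sort
// values of the final hit on the previous page. The values must line up with
// the sort order: a position that sorts by score is parsed as a float, and a
// position that sorts by document ID supplies the ID, as in the constructor.
func nextPageCollector(pageSize int, sort search.SortOrder, lastSortKey []string) *collector.TopNCollector {
	return collector.NewTopNCollectorAfter(pageSize, sort, lastSortKey)
}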
@@ -140,6 +169,8 @@ func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher,
 	}
 	searchContext := &search.SearchContext{
 		DocumentMatchPool: search.NewDocumentMatchPool(backingSize+searcher.DocumentMatchPoolSize(), len(hc.sort)),
+		Collector:         hc,
+		IndexReader:       reader,
 	}
 
 	hc.dvReader, err = reader.DocValueReader(hc.neededFields)
@@ -154,6 +185,19 @@ func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher,
 		hc.sort.UpdateVisitor(field, term)
 	}
 
+	dmHandlerMaker := MakeTopNDocumentMatchHandler
+	if cv := ctx.Value(search.MakeDocumentMatchHandlerKey); cv != nil {
+		dmHandlerMaker = cv.(search.MakeDocumentMatchHandler)
+	}
+	// use the application given builder for making the custom document match
+	// handler and perform callbacks/invocations on the newly made handler.
+	dmHandler, loadID, err := dmHandlerMaker(searchContext)
+	if err != nil {
+		return err
+	}
+
+	hc.needDocIds = hc.needDocIds || loadID
+
 	select {
 	case <-ctx.Done():
 		return ctx.Err()
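
The hunk above lets an application override how each hit is consumed: if the context passed to Collect carries a value under search.MakeDocumentMatchHandlerKey, that maker is used instead of MakeTopNDocumentMatchHandler. A minimal sketch of a custom maker, relying only on the types visible in this diff; countingHandlerMaker and withCountingHandler are hypothetical names:

package example

import (
	"context"

	"github.com/blevesearch/bleve/search"
)

// countingHandlerMaker returns a search.MakeDocumentMatchHandler whose handler
// only counts hits. The handler is invoked once with nil when collection ends
// (the dmHandler(nil) flush in the next hunk), which is when the total is
// reported. The second return value (loadID) is false: no document IDs needed.
func countingHandlerMaker(report func(total int)) search.MakeDocumentMatchHandler {
	return func(sc *search.SearchContext) (search.DocumentMatchHandler, bool, error) {
		total := 0
		handler := func(d *search.DocumentMatch) error {
			if d == nil { // nil signals "collection finished, flush"
				report(total)
				return nil
			}
			total++
			sc.DocumentMatchPool.Put(d) // recycle the match, as the built-in handler does
			return nil
		}
		return handler, false, nil
	}
}

// The maker is handed to Collect through the context it runs with.
func withCountingHandler(ctx context.Context, report func(total int)) context.Context {
	return context.WithValue(ctx, search.MakeDocumentMatchHandlerKey, countingHandlerMaker(report))
}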
@@ -169,13 +213,26 @@ func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher,
 			}
 		}
 
-		err = hc.collectSingle(searchContext, reader, next)
+		err = hc.prepareDocumentMatch(searchContext, reader, next)
 		if err != nil {
 			break
 		}
+
+		err = dmHandler(next)
+		if err != nil {
+			break
+		}
+
 		next, err = searcher.Next(searchContext)
 	}
 
+	// help finalize/flush the results in case
+	// of custom document match handlers.
+	err = dmHandler(nil)
+	if err != nil {
+		return err
+	}
+
 	// compute search duration
 	hc.took = time.Since(startTime)
 	if err != nil {
@@ -191,8 +248,8 @@ func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher,
 
 var sortByScoreOpt = []string{"_score"}
 
-func (hc *TopNCollector) collectSingle(ctx *search.SearchContext, reader index.IndexReader, d *search.DocumentMatch) error {
-	var err error
+func (hc *TopNCollector) prepareDocumentMatch(ctx *search.SearchContext,
+	reader index.IndexReader, d *search.DocumentMatch) (err error) {
 
 	// visit field terms for features that require it (sort, facets)
 	if len(hc.neededFields) > 0 {
@@ -226,35 +283,64 @@ func (hc *TopNCollector) collectSingle(ctx *search.SearchContext, reader index.I
 		hc.sort.Value(d)
 	}
 
-	// optimization, we track lowest sorting hit already removed from heap
-	// with this one comparison, we can avoid all heap operations if
-	// this hit would have been added and then immediately removed
-	if hc.lowestMatchOutsideResults != nil {
-		cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, d, hc.lowestMatchOutsideResults)
-		if cmp >= 0 {
-			// this hit can't possibly be in the result set, so avoid heap ops
-			ctx.DocumentMatchPool.Put(d)
-			return nil
-		}
-	}
-
-	removed := hc.store.AddNotExceedingSize(d, hc.size+hc.skip)
-	if removed != nil {
-		if hc.lowestMatchOutsideResults == nil {
-			hc.lowestMatchOutsideResults = removed
-		} else {
-			cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, removed, hc.lowestMatchOutsideResults)
-			if cmp < 0 {
-				tmp := hc.lowestMatchOutsideResults
-				hc.lowestMatchOutsideResults = removed
-				ctx.DocumentMatchPool.Put(tmp)
-			}
-		}
-	}
-
 	return nil
 }
 
+func MakeTopNDocumentMatchHandler(
+	ctx *search.SearchContext) (search.DocumentMatchHandler, bool, error) {
+	var hc *TopNCollector
+	var ok bool
+	if hc, ok = ctx.Collector.(*TopNCollector); ok {
+		return func(d *search.DocumentMatch) error {
+			if d == nil {
+				return nil
+			}
+
+			// support search after based pagination,
+			// if this hit is <= the search after sort key
+			// we should skip it
+			if hc.searchAfter != nil {
+				// exact sort order matches use hit number to break tie
+				// but we want to allow for exact match, so we pretend
+				hc.searchAfter.HitNumber = d.HitNumber
+				if hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, d, hc.searchAfter) <= 0 {
+					return nil
+				}
+			}
+
+			// optimization, we track lowest sorting hit already removed from heap
+			// with this one comparison, we can avoid all heap operations if
+			// this hit would have been added and then immediately removed
+			if hc.lowestMatchOutsideResults != nil {
+				cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, d,
+					hc.lowestMatchOutsideResults)
+				if cmp >= 0 {
+					// this hit can't possibly be in the result set, so avoid heap ops
+					ctx.DocumentMatchPool.Put(d)
+					return nil
+				}
+			}
+
+			removed := hc.store.AddNotExceedingSize(d, hc.size+hc.skip)
+			if removed != nil {
+				if hc.lowestMatchOutsideResults == nil {
+					hc.lowestMatchOutsideResults = removed
+				} else {
+					cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc,
+						removed, hc.lowestMatchOutsideResults)
+					if cmp < 0 {
+						tmp := hc.lowestMatchOutsideResults
+						hc.lowestMatchOutsideResults = removed
+						ctx.DocumentMatchPool.Put(tmp)
+					}
+				}
+			}
+			return nil
+		}, false, nil
+	}
+	return nil, false, nil
+}
+
 // visitFieldTerms is responsible for visiting the field terms of the
 // search hit, and passing visited terms to the sort and facet builder
 func (hc *TopNCollector) visitFieldTerms(reader index.IndexReader, d *search.DocumentMatch) error {