Switch to bbolt

parent 360bed00f9
commit 77543e3aed

617 changed files with 68468 additions and 97867 deletions
vendor/github.com/blevesearch/bleve/search/searcher/search_boolean.go | 90 (generated, vendored)

@@ -45,6 +45,7 @@ type BooleanSearcher struct {
scorer *scorer.ConjunctionQueryScorer
matches []*search.DocumentMatch
initialized bool
done bool
}

func NewBooleanSearcher(indexReader index.IndexReader, mustSearcher search.Searcher, shouldSearcher search.Searcher, mustNotSearcher search.Searcher, options search.SearcherOptions) (*BooleanSearcher, error) {

@@ -207,6 +208,10 @@ func (s *BooleanSearcher) SetQueryNorm(qnorm float64) {
func (s *BooleanSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) {
if s.done {
return nil, nil
}
if !s.initialized {
err := s.initSearchers(ctx)
if err != nil {

@@ -319,11 +324,20 @@ func (s *BooleanSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch
return nil, err
}
}
if rv == nil {
s.done = true
}
return rv, nil
}

func (s *BooleanSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) {
if s.done {
return nil, nil
}
if !s.initialized {
err := s.initSearchers(ctx)
if err != nil {

@@ -331,41 +345,51 @@ func (s *BooleanSearcher) Advance(ctx *search.SearchContext, ID index.IndexInter
}
}
var err error
if s.mustSearcher != nil {
if s.currMust != nil {
ctx.DocumentMatchPool.Put(s.currMust)
// Advance the searcher only if the cursor is trailing the lookup ID
if s.currentID == nil || s.currentID.Compare(ID) < 0 {
var err error
if s.mustSearcher != nil {
if s.currMust != nil {
ctx.DocumentMatchPool.Put(s.currMust)
}
s.currMust, err = s.mustSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
s.currMust, err = s.mustSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
if s.shouldSearcher != nil {
if s.currShould != nil {
ctx.DocumentMatchPool.Put(s.currShould)
}
s.currShould, err = s.shouldSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
if s.mustNotSearcher != nil {
if s.currMustNot != nil {
ctx.DocumentMatchPool.Put(s.currMustNot)
}
s.currMustNot, err = s.mustNotSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
if s.mustSearcher != nil && s.currMust != nil {
s.currentID = s.currMust.IndexInternalID
} else if s.mustSearcher == nil && s.currShould != nil {
s.currentID = s.currShould.IndexInternalID
} else {
s.currentID = nil
if s.shouldSearcher != nil {
if s.currShould != nil {
ctx.DocumentMatchPool.Put(s.currShould)
}
s.currShould, err = s.shouldSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
if s.mustNotSearcher != nil {
// Additional check for mustNotSearcher, whose cursor isn't tracked by
// currentID to prevent it from moving when the searcher's tracked
// position is already ahead of or at the requested ID.
if s.currMustNot == nil || s.currMustNot.IndexInternalID.Compare(ID) < 0 {
if s.currMustNot != nil {
ctx.DocumentMatchPool.Put(s.currMustNot)
}
s.currMustNot, err = s.mustNotSearcher.Advance(ctx, ID)
if err != nil {
return nil, err
}
}
}
if s.mustSearcher != nil && s.currMust != nil {
s.currentID = s.currMust.IndexInternalID
} else if s.mustSearcher == nil && s.currShould != nil {
s.currentID = s.currShould.IndexInternalID
} else {
s.currentID = nil
}
}
return s.Next(ctx)
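The BooleanSearcher hunks add a done flag and guard Advance so that child searchers only move when the tracked cursor still trails the requested ID. A standalone Go sketch of that guard, using a toy cursor type rather than the vendored searcher API:

package main

import (
	"bytes"
	"fmt"
)

// cursor is a toy stand-in for a searcher that walks ascending doc IDs.
type cursor struct {
	ids [][]byte // sorted doc IDs
	pos int
}

// advance moves the cursor to the first ID >= target.
func (c *cursor) advance(target []byte) []byte {
	for c.pos < len(c.ids) && bytes.Compare(c.ids[c.pos], target) < 0 {
		c.pos++
	}
	if c.pos == len(c.ids) {
		return nil
	}
	return c.ids[c.pos]
}

func main() {
	c := &cursor{ids: [][]byte{{1}, {3}, {7}}}
	var currentID []byte

	seek := func(target []byte) {
		// Only advance when the tracked cursor trails the requested ID,
		// mirroring the guard added in BooleanSearcher.Advance.
		if currentID == nil || bytes.Compare(currentID, target) < 0 {
			currentID = c.advance(target)
		}
		fmt.Println(currentID)
	}

	seek([]byte{3}) // moves to {3}
	seek([]byte{2}) // no-op: the cursor is already at or past the target
}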
vendor/github.com/blevesearch/bleve/search/searcher/search_conjunction.go | 43 (generated, vendored)

@@ -43,14 +43,27 @@ type ConjunctionSearcher struct {
options search.SearcherOptions
}

func NewConjunctionSearcher(indexReader index.IndexReader, qsearchers []search.Searcher, options search.SearcherOptions) (*ConjunctionSearcher, error) {
// build the downstream searchers
func NewConjunctionSearcher(indexReader index.IndexReader,
qsearchers []search.Searcher, options search.SearcherOptions) (
search.Searcher, error) {
// build the sorted downstream searchers
searchers := make(OrderedSearcherList, len(qsearchers))
for i, searcher := range qsearchers {
searchers[i] = searcher
}
// sort the searchers
sort.Sort(searchers)

// attempt the "unadorned" conjunction optimization only when we
// do not need extra information like freq-norm's or term vectors
if len(searchers) > 1 &&
options.Score == "none" && !options.IncludeTermVectors {
rv, err := optimizeCompositeSearcher("conjunction:unadorned",
indexReader, searchers, options)
if err != nil || rv != nil {
return rv, err
}
}

// build our searcher
rv := ConjunctionSearcher{
indexReader: indexReader,

@@ -63,24 +76,10 @@ func NewConjunctionSearcher(indexReader index.IndexReader, qsearchers []search.S
// attempt push-down conjunction optimization when there's >1 searchers
if len(searchers) > 1 {
var octx index.OptimizableContext

for _, searcher := range searchers {
o, ok := searcher.(index.Optimizable)
if ok {
var err error
octx, err = o.Optimize("conjunction", octx)
if err != nil {
return nil, err
}
}
}

if octx != nil {
err := octx.Finish()
if err != nil {
return nil, err
}
rv, err := optimizeCompositeSearcher("conjunction",
indexReader, searchers, options)
if err != nil || rv != nil {
return rv, err
}
}

@@ -158,7 +157,7 @@ func (s *ConjunctionSearcher) Next(ctx *search.SearchContext) (*search.DocumentM
var rv *search.DocumentMatch
var err error
OUTER:
for s.currs[s.maxIDIdx] != nil {
for s.maxIDIdx < len(s.currs) && s.currs[s.maxIDIdx] != nil {
maxID := s.currs[s.maxIDIdx].IndexInternalID

i := 0
vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction.go | 54 (generated, vendored)

@@ -40,6 +40,18 @@ func NewDisjunctionSearcher(indexReader index.IndexReader,
func newDisjunctionSearcher(indexReader index.IndexReader,
qsearchers []search.Searcher, min float64, options search.SearcherOptions,
limit bool) (search.Searcher, error) {
// attempt the "unadorned" disjunction optimization only when we
// do not need extra information like freq-norm's or term vectors
// and the requested min is simple
if len(qsearchers) > 1 && min <= 1 &&
options.Score == "none" && !options.IncludeTermVectors {
rv, err := optimizeCompositeSearcher("disjunction:unadorned",
indexReader, qsearchers, options)
if err != nil || rv != nil {
return rv, err
}
}

if len(qsearchers) > DisjunctionHeapTakeover {
return newDisjunctionHeapSearcher(indexReader, qsearchers, min, options,
limit)

@@ -48,6 +60,42 @@ func newDisjunctionSearcher(indexReader index.IndexReader,
limit)
}

func optimizeCompositeSearcher(optimizationKind string,
indexReader index.IndexReader, qsearchers []search.Searcher,
options search.SearcherOptions) (search.Searcher, error) {
var octx index.OptimizableContext

for _, searcher := range qsearchers {
o, ok := searcher.(index.Optimizable)
if !ok {
return nil, nil
}

var err error
octx, err = o.Optimize(optimizationKind, octx)
if err != nil {
return nil, err
}

if octx == nil {
return nil, nil
}
}

optimized, err := octx.Finish()
if err != nil || optimized == nil {
return nil, err
}

tfr, ok := optimized.(index.TermFieldReader)
if !ok {
return nil, nil
}

return newTermSearcherFromReader(indexReader, tfr,
[]byte(optimizationKind), "*", 1.0, options)
}

func tooManyClauses(count int) bool {
if DisjunctionMaxClauseCount != 0 && count > DisjunctionMaxClauseCount {
return true

@@ -55,7 +103,7 @@ func tooManyClauses(count int) bool {
return false
}

func tooManyClausesErr() error {
return fmt.Errorf("TooManyClauses[maxClauseCount is set to %d]",
DisjunctionMaxClauseCount)
func tooManyClausesErr(count int) error {
return fmt.Errorf("TooManyClauses[%d > maxClauseCount, which is set to %d]",
count, DisjunctionMaxClauseCount)
}
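Both the conjunction and disjunction constructors now try an "unadorned" optimization when scoring and term vectors are off, and optimizeCompositeSearcher abandons the attempt unless every child opts in through the Optimizable interface. A minimal standalone sketch of that opt-in-or-bail-out handshake, with toy types rather than the bleve index API:

package main

import "fmt"

// optimizable is a toy version of the opt-in interface: each child either
// joins a shared optimization context or the whole attempt is abandoned.
type optimizable interface {
	optimize(kind string, ctx *sharedCtx) (*sharedCtx, error)
}

type sharedCtx struct{ members []string }

type child struct{ name string }

func (c child) optimize(kind string, ctx *sharedCtx) (*sharedCtx, error) {
	if ctx == nil {
		ctx = &sharedCtx{}
	}
	ctx.members = append(ctx.members, c.name+":"+kind)
	return ctx, nil
}

// tryOptimize mirrors the control flow: bail out with (nil, nil) as soon as
// one child does not support the optimization, so the caller falls back to
// the regular composite searcher.
func tryOptimize(kind string, children []interface{}) (*sharedCtx, error) {
	var ctx *sharedCtx
	for _, ch := range children {
		o, ok := ch.(optimizable)
		if !ok {
			return nil, nil // not all children participate; fall back
		}
		var err error
		ctx, err = o.optimize(kind, ctx)
		if err != nil {
			return nil, err
		}
	}
	return ctx, nil
}

func main() {
	ctx, _ := tryOptimize("disjunction:unadorned", []interface{}{child{"a"}, child{"b"}})
	fmt.Println(ctx.members)

	ctx, _ = tryOptimize("disjunction:unadorned", []interface{}{child{"a"}, "plain"})
	fmt.Println(ctx) // <nil>: fall back to the regular searcher
}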
vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_heap.go | 2 (generated, vendored)

@@ -62,7 +62,7 @@ func newDisjunctionHeapSearcher(indexReader index.IndexReader,
limit bool) (
*DisjunctionHeapSearcher, error) {
if limit && tooManyClauses(len(searchers)) {
return nil, tooManyClausesErr()
return nil, tooManyClausesErr(len(searchers))
}

// build our searcher
vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_slice.go | 2 (generated, vendored)

@@ -50,7 +50,7 @@ func newDisjunctionSliceSearcher(indexReader index.IndexReader,
limit bool) (
*DisjunctionSliceSearcher, error) {
if limit && tooManyClauses(len(qsearchers)) {
return nil, tooManyClausesErr()
return nil, tooManyClausesErr(len(qsearchers))
}
// build the downstream searchers
searchers := make(OrderedSearcherList, len(qsearchers))
vendor/github.com/blevesearch/bleve/search/searcher/search_fuzzy.go | 54 (generated, vendored)

@@ -31,6 +31,10 @@ func NewFuzzySearcher(indexReader index.IndexReader, term string,
return nil, fmt.Errorf("fuzziness exceeds max (%d)", MaxFuzziness)
}

if fuzziness < 0 {
return nil, fmt.Errorf("invalid fuzziness, negative")
}

// Note: we don't byte slice the term for a prefix because of runes.
prefixTerm := ""
for i, r := range term {

@@ -53,32 +57,40 @@ func NewFuzzySearcher(indexReader index.IndexReader, term string,
func findFuzzyCandidateTerms(indexReader index.IndexReader, term string,
fuzziness int, field, prefixTerm string) (rv []string, err error) {
rv = make([]string, 0)

// in case of advanced reader implementations directly call
// the levenshtein automaton based iterator to collect the
// candidate terms
if ir, ok := indexReader.(index.IndexReaderFuzzy); ok {
fieldDict, err := ir.FieldDictFuzzy(field, term, fuzziness, prefixTerm)
if err != nil {
return nil, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()
tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
rv = append(rv, tfd.Term)
if tooManyClauses(len(rv)) {
return nil, tooManyClausesErr(len(rv))
}
tfd, err = fieldDict.Next()
}
return rv, err
}

var fieldDict index.FieldDict
if len(prefixTerm) > 0 {
fieldDict, err = indexReader.FieldDictPrefix(field, []byte(prefixTerm))
} else {
// in case of advanced reader implementations directly call
// the levenshtein automaton based iterator to collect the
// candidate terms
if ir, ok := indexReader.(index.IndexReaderFuzzy); ok {
fieldDict, err = ir.FieldDictFuzzy(field, []byte(term), fuzziness)
if err != nil {
return rv, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()
tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
rv = append(rv, tfd.Term)
tfd, err = fieldDict.Next()
}
return rv, err
}
fieldDict, err = indexReader.FieldDict(field)
}
if err != nil {
return nil, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr

@@ -95,7 +107,7 @@ func findFuzzyCandidateTerms(indexReader index.IndexReader, term string,
if !exceeded && ld <= fuzziness {
rv = append(rv, tfd.Term)
if tooManyClauses(len(rv)) {
return rv, tooManyClausesErr()
return nil, tooManyClausesErr(len(rv))
}
}
tfd, err = fieldDict.Next()
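The fuzzy searcher now caps candidate terms with tooManyClausesErr(len(rv)) while it enumerates the dictionary. A self-contained sketch of the same idea, using a plain dynamic-programming edit distance in place of the automaton-backed FieldDictFuzzy iterator; the dictionary contents and the cap value here are made up:

package main

import "fmt"

// levenshtein is a plain dynamic-programming edit distance, standing in for
// the automaton-based candidate generation the diff prefers when available.
func levenshtein(a, b string) int {
	ra, rb := []rune(a), []rune(b)
	prev := make([]int, len(rb)+1)
	curr := make([]int, len(rb)+1)
	for j := range prev {
		prev[j] = j
	}
	for i := 1; i <= len(ra); i++ {
		curr[0] = i
		for j := 1; j <= len(rb); j++ {
			cost := 1
			if ra[i-1] == rb[j-1] {
				cost = 0
			}
			curr[j] = minInt(minInt(curr[j-1]+1, prev[j]+1), prev[j-1]+cost)
		}
		prev, curr = curr, prev
	}
	return prev[len(rb)]
}

func minInt(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func main() {
	const maxClauseCount = 2 // hypothetical cap, like DisjunctionMaxClauseCount
	dictionary := []string{"beer", "bear", "bead", "boar", "bore"}

	var candidates []string
	for _, term := range dictionary {
		if levenshtein("beer", term) <= 1 {
			candidates = append(candidates, term)
			if len(candidates) > maxClauseCount {
				fmt.Printf("TooManyClauses[%d > maxClauseCount, which is set to %d]\n",
					len(candidates), maxClauseCount)
				return
			}
		}
	}
	fmt.Println(candidates)
}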
vendor/github.com/blevesearch/bleve/search/searcher/search_geoboundingbox.go | 223 (generated, vendored)

@@ -22,6 +22,11 @@ import (
"github.com/blevesearch/bleve/search"
)

type filterFunc func(key []byte) bool

var GeoBitsShift1 = (geo.GeoBits << 1)
var GeoBitsShift1Minus1 = GeoBitsShift1 - 1

func NewGeoBoundingBoxSearcher(indexReader index.IndexReader, minLon, minLat,
maxLon, maxLat float64, field string, boost float64,
options search.SearcherOptions, checkBoundaries bool) (

@@ -36,10 +41,18 @@ func NewGeoBoundingBoxSearcher(indexReader index.IndexReader, minLon, minLat,
}

// do math to produce list of terms needed for this search
onBoundaryTerms, notOnBoundaryTerms := ComputeGeoRange(0, (geo.GeoBits<<1)-1,
minLon, minLat, maxLon, maxLat, checkBoundaries)
onBoundaryTerms, notOnBoundaryTerms, err := ComputeGeoRange(0, GeoBitsShift1Minus1,
minLon, minLat, maxLon, maxLat, checkBoundaries, indexReader, field)
if err != nil {
return nil, err
}

var onBoundarySearcher search.Searcher
dvReader, err := indexReader.DocValueReader([]string{field})
if err != nil {
return nil, err
}

if len(onBoundaryTerms) > 0 {
rawOnBoundarySearcher, err := NewMultiTermSearcherBytes(indexReader,
onBoundaryTerms, field, boost, options, false)

@@ -48,7 +61,7 @@ func NewGeoBoundingBoxSearcher(indexReader index.IndexReader, minLon, minLat,
}
// add filter to check points near the boundary
onBoundarySearcher = NewFilteringSearcher(rawOnBoundarySearcher,
buildRectFilter(indexReader, field, minLon, minLat, maxLon, maxLat))
buildRectFilter(dvReader, field, minLon, minLat, maxLon, maxLat))
openedSearchers = append(openedSearchers, onBoundarySearcher)
}

@@ -89,84 +102,152 @@ var geoMaxShift = document.GeoPrecisionStep * 4
var geoDetailLevel = ((geo.GeoBits << 1) - geoMaxShift) / 2

func ComputeGeoRange(term uint64, shift uint,
sminLon, sminLat, smaxLon, smaxLat float64,
checkBoundaries bool) (
onBoundary [][]byte, notOnBoundary [][]byte) {
split := term | uint64(0x1)<<shift
var upperMax uint64
if shift < 63 {
upperMax = term | ((uint64(1) << (shift + 1)) - 1)
} else {
upperMax = 0xffffffffffffffff
}
lowerMax := split - 1
onBoundary, notOnBoundary = relateAndRecurse(term, lowerMax, shift,
sminLon, sminLat, smaxLon, smaxLat, checkBoundaries)
plusOnBoundary, plusNotOnBoundary := relateAndRecurse(split, upperMax, shift,
sminLon, sminLat, smaxLon, smaxLat, checkBoundaries)
onBoundary = append(onBoundary, plusOnBoundary...)
notOnBoundary = append(notOnBoundary, plusNotOnBoundary...)
return
}
sminLon, sminLat, smaxLon, smaxLat float64, checkBoundaries bool,
indexReader index.IndexReader, field string) (
onBoundary [][]byte, notOnBoundary [][]byte, err error) {
preallocBytesLen := 32
preallocBytes := make([]byte, preallocBytesLen)

func relateAndRecurse(start, end uint64, res uint,
sminLon, sminLat, smaxLon, smaxLat float64,
checkBoundaries bool) (
onBoundary [][]byte, notOnBoundary [][]byte) {
minLon := geo.MortonUnhashLon(start)
minLat := geo.MortonUnhashLat(start)
maxLon := geo.MortonUnhashLon(end)
maxLat := geo.MortonUnhashLat(end)

level := ((geo.GeoBits << 1) - res) >> 1

within := res%document.GeoPrecisionStep == 0 &&
geo.RectWithin(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat)
if within || (level == geoDetailLevel &&
geo.RectIntersects(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat)) {
if !within && checkBoundaries {
return [][]byte{
numeric.MustNewPrefixCodedInt64(int64(start), res),
}, nil
makePrefixCoded := func(in int64, shift uint) (rv numeric.PrefixCoded) {
if len(preallocBytes) <= 0 {
preallocBytesLen = preallocBytesLen * 2
preallocBytes = make([]byte, preallocBytesLen)
}
return nil,
[][]byte{
numeric.MustNewPrefixCodedInt64(int64(start), res),
}
} else if level < geoDetailLevel &&
geo.RectIntersects(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat) {
return ComputeGeoRange(start, res-1, sminLon, sminLat, smaxLon, smaxLat,
checkBoundaries)

rv, preallocBytes, err =
numeric.NewPrefixCodedInt64Prealloc(in, shift, preallocBytes)

return rv
}
return nil, nil

var fieldDict index.FieldDictContains
var isIndexed filterFunc
if irr, ok := indexReader.(index.IndexReaderContains); ok {
fieldDict, err = irr.FieldDictContains(field)
if err != nil {
return nil, nil, err
}

isIndexed = func(term []byte) bool {
found, err := fieldDict.Contains(term)
return err == nil && found
}
}

defer func() {
if fieldDict != nil {
if fd, ok := fieldDict.(index.FieldDict); ok {
cerr := fd.Close()
if cerr != nil {
err = cerr
}
}
}
}()

if isIndexed == nil {
isIndexed = func(term []byte) bool {
if indexReader != nil {
reader, err := indexReader.TermFieldReader(term, field, false, false, false)
if err != nil || reader == nil {
return false
}
if reader.Count() == 0 {
_ = reader.Close()
return false
}
_ = reader.Close()
}
return true
}
}

var computeGeoRange func(term uint64, shift uint) // declare for recursion

relateAndRecurse := func(start, end uint64, res, level uint) {
minLon := geo.MortonUnhashLon(start)
minLat := geo.MortonUnhashLat(start)
maxLon := geo.MortonUnhashLon(end)
maxLat := geo.MortonUnhashLat(end)

within := res%document.GeoPrecisionStep == 0 &&
geo.RectWithin(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat)
if within || (level == geoDetailLevel &&
geo.RectIntersects(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat)) {
codedTerm := makePrefixCoded(int64(start), res)
if isIndexed(codedTerm) {
if !within && checkBoundaries {
onBoundary = append(onBoundary, codedTerm)
} else {
notOnBoundary = append(notOnBoundary, codedTerm)
}
}
} else if level < geoDetailLevel &&
geo.RectIntersects(minLon, minLat, maxLon, maxLat,
sminLon, sminLat, smaxLon, smaxLat) {
computeGeoRange(start, res-1)
}
}

computeGeoRange = func(term uint64, shift uint) {
if err != nil {
return
}

split := term | uint64(0x1)<<shift
var upperMax uint64
if shift < 63 {
upperMax = term | ((uint64(1) << (shift + 1)) - 1)
} else {
upperMax = 0xffffffffffffffff
}

lowerMax := split - 1

level := (GeoBitsShift1 - shift) >> 1

relateAndRecurse(term, lowerMax, shift, level)
relateAndRecurse(split, upperMax, shift, level)
}

computeGeoRange(term, shift)

if err != nil {
return nil, nil, err
}

return onBoundary, notOnBoundary, err
}

func buildRectFilter(indexReader index.IndexReader, field string,
func buildRectFilter(dvReader index.DocValueReader, field string,
minLon, minLat, maxLon, maxLat float64) FilterFunc {
return func(d *search.DocumentMatch) bool {
var lon, lat float64
// check geo matches against all numeric type terms indexed
var lons, lats []float64
var found bool
err := indexReader.DocumentVisitFieldTerms(d.IndexInternalID,
[]string{field}, func(field string, term []byte) {
// only consider the values which are shifted 0
prefixCoded := numeric.PrefixCoded(term)
shift, err := prefixCoded.Shift()
if err == nil && shift == 0 {
var i64 int64
i64, err = prefixCoded.Int64()
if err == nil {
lon = geo.MortonUnhashLon(uint64(i64))
lat = geo.MortonUnhashLat(uint64(i64))
found = true
}
err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) {
// only consider the values which are shifted 0
prefixCoded := numeric.PrefixCoded(term)
shift, err := prefixCoded.Shift()
if err == nil && shift == 0 {
var i64 int64
i64, err = prefixCoded.Int64()
if err == nil {
lons = append(lons, geo.MortonUnhashLon(uint64(i64)))
lats = append(lats, geo.MortonUnhashLat(uint64(i64)))
found = true
}
})
}
})
if err == nil && found {
return geo.BoundingBoxContains(lon, lat,
minLon, minLat, maxLon, maxLat)
for i := range lons {
if geo.BoundingBoxContains(lons[i], lats[i],
minLon, minLat, maxLon, maxLat) {
return true
}
}
}
return false
}
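ComputeGeoRange is rewritten around closures: the recursive helper is declared as a nil func variable and then assigned, so it can call itself while sharing the accumulators, the prealloc buffer, and err with the enclosing function. A tiny standalone sketch of that declare-then-assign recursion pattern (the split logic here is illustrative, not the geo code):

package main

import "fmt"

func main() {
	// Shared state the closure captures from the enclosing scope.
	var sum int

	// Declare first so the closure body can refer to itself.
	var walk func(lo, hi int)

	walk = func(lo, hi int) {
		if hi-lo <= 1 {
			sum += lo // leaf: accumulate into the captured variable
			return
		}
		mid := (lo + hi) / 2
		walk(lo, mid) // recurse on the lower half
		walk(mid, hi) // recurse on the upper half
	}

	walk(0, 8)
	fmt.Println(sum) // 0+1+...+7 = 28
}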
vendor/github.com/blevesearch/bleve/search/searcher/search_geopointdistance.go | 59 (generated, vendored)

@@ -34,14 +34,19 @@ func NewGeoPointDistanceSearcher(indexReader index.IndexReader, centerLon,
// build a searcher for the box
boxSearcher, err := boxSearcher(indexReader,
topLeftLon, topLeftLat, bottomRightLon, bottomRightLat,
field, boost, options)
field, boost, options, false)
if err != nil {
return nil, err
}

dvReader, err := indexReader.DocValueReader([]string{field})
if err != nil {
return nil, err
}

// wrap it in a filtering searcher which checks the actual distance
return NewFilteringSearcher(boxSearcher,
buildDistFilter(indexReader, field, centerLon, centerLat, dist)), nil
buildDistFilter(dvReader, field, centerLon, centerLat, dist)), nil
}

// boxSearcher builds a searcher for the described bounding box

@@ -49,19 +54,20 @@ func NewGeoPointDistanceSearcher(indexReader index.IndexReader, centerLon,
// two boxes joined through a disjunction searcher
func boxSearcher(indexReader index.IndexReader,
topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64,
field string, boost float64, options search.SearcherOptions) (
field string, boost float64, options search.SearcherOptions, checkBoundaries bool) (
search.Searcher, error) {
if bottomRightLon < topLeftLon {
// cross date line, rewrite as two parts

leftSearcher, err := NewGeoBoundingBoxSearcher(indexReader,
-180, bottomRightLat, bottomRightLon, topLeftLat,
field, boost, options, false)
field, boost, options, checkBoundaries)
if err != nil {
return nil, err
}
rightSearcher, err := NewGeoBoundingBoxSearcher(indexReader,
topLeftLon, bottomRightLat, 180, topLeftLat, field, boost, options, false)
topLeftLon, bottomRightLat, 180, topLeftLat, field, boost, options,
checkBoundaries)
if err != nil {
_ = leftSearcher.Close()
return nil, err

@@ -77,39 +83,42 @@ func boxSearcher(indexReader index.IndexReader,
return boxSearcher, nil
}

// build geoboundinggox searcher for that bounding box
// build geoboundingbox searcher for that bounding box
boxSearcher, err := NewGeoBoundingBoxSearcher(indexReader,
topLeftLon, bottomRightLat, bottomRightLon, topLeftLat, field, boost,
options, false)
options, checkBoundaries)
if err != nil {
return nil, err
}
return boxSearcher, nil
}

func buildDistFilter(indexReader index.IndexReader, field string,
func buildDistFilter(dvReader index.DocValueReader, field string,
centerLon, centerLat, maxDist float64) FilterFunc {
return func(d *search.DocumentMatch) bool {
var lon, lat float64
// check geo matches against all numeric type terms indexed
var lons, lats []float64
var found bool
err := indexReader.DocumentVisitFieldTerms(d.IndexInternalID,
[]string{field}, func(field string, term []byte) {
// only consider the values which are shifted 0
prefixCoded := numeric.PrefixCoded(term)
shift, err := prefixCoded.Shift()
if err == nil && shift == 0 {
i64, err := prefixCoded.Int64()
if err == nil {
lon = geo.MortonUnhashLon(uint64(i64))
lat = geo.MortonUnhashLat(uint64(i64))
found = true
}

err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) {
// only consider the values which are shifted 0
prefixCoded := numeric.PrefixCoded(term)
shift, err := prefixCoded.Shift()
if err == nil && shift == 0 {
i64, err := prefixCoded.Int64()
if err == nil {
lons = append(lons, geo.MortonUnhashLon(uint64(i64)))
lats = append(lats, geo.MortonUnhashLat(uint64(i64)))
found = true
}
})
}
})
if err == nil && found {
dist := geo.Haversin(lon, lat, centerLon, centerLat)
if dist <= maxDist/1000 {
return true
for i := range lons {
dist := geo.Haversin(lons[i], lats[i], centerLon, centerLat)
if dist <= maxDist/1000 {
return true
}
}
}
return false
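The distance filter now reads doc values and keeps a document when any of its points is within maxDist of the centre, comparing a Haversine distance in kilometres against maxDist/1000. A standalone Haversine sketch; the earth-radius constant and the sample coordinates are my own, not the vendored geo package:

package main

import (
	"fmt"
	"math"
)

const earthRadiusKm = 6371.0

// haversineKm returns the great-circle distance in kilometres; the vendored
// filter relies on geo.Haversin for the same purpose and compares the result
// against maxDist/1000, i.e. a metre value converted to kilometres.
func haversineKm(lon1, lat1, lon2, lat2 float64) float64 {
	toRad := func(deg float64) float64 { return deg * math.Pi / 180 }
	dLat := toRad(lat2 - lat1)
	dLon := toRad(lon2 - lon1)
	a := math.Sin(dLat/2)*math.Sin(dLat/2) +
		math.Cos(toRad(lat1))*math.Cos(toRad(lat2))*
			math.Sin(dLon/2)*math.Sin(dLon/2)
	return 2 * earthRadiusKm * math.Asin(math.Sqrt(a))
}

func main() {
	// Roughly Paris -> London, about 340 km.
	d := haversineKm(2.3522, 48.8566, -0.1276, 51.5072)
	maxDistMeters := 400000.0
	fmt.Printf("%.0f km, within filter: %v\n", d, d <= maxDistMeters/1000)
}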
vendor/github.com/blevesearch/bleve/search/searcher/search_geopolygon.go | 126 (generated, vendored, new file)

@@ -0,0 +1,126 @@
// Copyright (c) 2019 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package searcher

import (
"fmt"
"github.com/blevesearch/bleve/geo"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/numeric"
"github.com/blevesearch/bleve/search"
"math"
)

func NewGeoBoundedPolygonSearcher(indexReader index.IndexReader,
polygon []geo.Point, field string, boost float64,
options search.SearcherOptions) (search.Searcher, error) {

if len(polygon) < 3 {
return nil, fmt.Errorf("Too few points specified for the polygon boundary")
}

// compute the bounding box enclosing the polygon
topLeftLon, topLeftLat, bottomRightLon, bottomRightLat, err :=
geo.BoundingRectangleForPolygon(polygon)
if err != nil {
return nil, err
}

// build a searcher for the bounding box on the polygon
boxSearcher, err := boxSearcher(indexReader,
topLeftLon, topLeftLat, bottomRightLon, bottomRightLat,
field, boost, options, true)
if err != nil {
return nil, err
}

dvReader, err := indexReader.DocValueReader([]string{field})
if err != nil {
return nil, err
}

// wrap it in a filtering searcher that checks for the polygon inclusivity
return NewFilteringSearcher(boxSearcher,
buildPolygonFilter(dvReader, field, polygon)), nil
}

const float64EqualityThreshold = 1e-6

func almostEqual(a, b float64) bool {
return math.Abs(a-b) <= float64EqualityThreshold
}

// buildPolygonFilter returns true if the point lies inside the
// polygon. It is based on the ray-casting technique as referred
// here: https://wrf.ecse.rpi.edu/nikola/pubdetails/pnpoly.html
func buildPolygonFilter(dvReader index.DocValueReader, field string,
polygon []geo.Point) FilterFunc {
return func(d *search.DocumentMatch) bool {
// check geo matches against all numeric type terms indexed
var lons, lats []float64
var found bool

err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) {
// only consider the values which are shifted 0
prefixCoded := numeric.PrefixCoded(term)
shift, err := prefixCoded.Shift()
if err == nil && shift == 0 {
i64, err := prefixCoded.Int64()
if err == nil {
lons = append(lons, geo.MortonUnhashLon(uint64(i64)))
lats = append(lats, geo.MortonUnhashLat(uint64(i64)))
found = true
}
}
})

// Note: this approach works for points which are strictly inside
// the polygon. ie it might fail for certain points on the polygon boundaries.
if err == nil && found {
nVertices := len(polygon)
if len(polygon) < 3 {
return false
}
rayIntersectsSegment := func(point, a, b geo.Point) bool {
return (a.Lat > point.Lat) != (b.Lat > point.Lat) &&
point.Lon < (b.Lon-a.Lon)*(point.Lat-a.Lat)/(b.Lat-a.Lat)+a.Lon
}

for i := range lons {
pt := geo.Point{Lon: lons[i], Lat: lats[i]}
inside := rayIntersectsSegment(pt, polygon[len(polygon)-1], polygon[0])
// check for a direct vertex match
if almostEqual(polygon[0].Lat, lats[i]) &&
almostEqual(polygon[0].Lon, lons[i]) {
return true
}

for j := 1; j < nVertices; j++ {
if almostEqual(polygon[j].Lat, lats[i]) &&
almostEqual(polygon[j].Lon, lons[i]) {
return true
}
if rayIntersectsSegment(pt, polygon[j-1], polygon[j]) {
inside = !inside
}
}
if inside {
return true
}
}
}
return false
}
}
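The new polygon filter is a ray-casting (pnpoly) test: a horizontal ray from the candidate point toggles an inside flag each time it crosses a polygon edge, and an odd crossing count means the point is inside. A self-contained version of that test with a toy point type instead of geo.Point:

package main

import "fmt"

type point struct{ lon, lat float64 }

// insidePolygon counts how many polygon edges a horizontal ray from pt
// crosses; an odd count means the point is inside (same idea as the
// rayIntersectsSegment closure in the new filter).
func insidePolygon(pt point, poly []point) bool {
	inside := false
	j := len(poly) - 1
	for i := 0; i < len(poly); i++ {
		a, b := poly[j], poly[i]
		if (a.lat > pt.lat) != (b.lat > pt.lat) &&
			pt.lon < (b.lon-a.lon)*(pt.lat-a.lat)/(b.lat-a.lat)+a.lon {
			inside = !inside
		}
		j = i
	}
	return inside
}

func main() {
	square := []point{{0, 0}, {4, 0}, {4, 4}, {0, 4}}
	fmt.Println(insidePolygon(point{2, 2}, square)) // true
	fmt.Println(insidePolygon(point{5, 2}, square)) // false
}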
vendor/github.com/blevesearch/bleve/search/searcher/search_multi_term.go | 8 (generated, vendored)

@@ -22,6 +22,10 @@ import (
func NewMultiTermSearcher(indexReader index.IndexReader, terms []string,
field string, boost float64, options search.SearcherOptions, limit bool) (
search.Searcher, error) {
if limit && tooManyClauses(len(terms)) {
return nil, tooManyClausesErr(len(terms))
}

qsearchers := make([]search.Searcher, len(terms))
qsearchersClose := func() {
for _, searcher := range qsearchers {

@@ -46,6 +50,10 @@ func NewMultiTermSearcher(indexReader index.IndexReader, terms []string,
func NewMultiTermSearcherBytes(indexReader index.IndexReader, terms [][]byte,
field string, boost float64, options search.SearcherOptions, limit bool) (
search.Searcher, error) {
if limit && tooManyClauses(len(terms)) {
return nil, tooManyClausesErr(len(terms))
}

qsearchers := make([]search.Searcher, len(terms))
qsearchersClose := func() {
for _, searcher := range qsearchers {
vendor/github.com/blevesearch/bleve/search/searcher/search_numeric_range.go | 55 (generated, vendored)

@@ -53,22 +53,51 @@ func NewNumericRangeSearcher(indexReader index.IndexReader,
if !*inclusiveMax && maxInt64 != math.MinInt64 {
maxInt64--
}

var fieldDict index.FieldDictContains
var isIndexed filterFunc
var err error
if irr, ok := indexReader.(index.IndexReaderContains); ok {
fieldDict, err = irr.FieldDictContains(field)
if err != nil {
return nil, err
}

isIndexed = func(term []byte) bool {
found, err := fieldDict.Contains(term)
return err == nil && found
}
}

// FIXME hard-coded precision, should match field declaration
termRanges := splitInt64Range(minInt64, maxInt64, 4)
terms := termRanges.Enumerate()
terms := termRanges.Enumerate(isIndexed)
if fieldDict != nil {
if fd, ok := fieldDict.(index.FieldDict); ok {
cerr := fd.Close()
if cerr != nil {
err = cerr
}
}
}

if len(terms) < 1 {
// cannot return MatchNoneSearcher because of interaction with
// commit f391b991c20f02681bacd197afc6d8aed444e132
return NewMultiTermSearcherBytes(indexReader, terms, field, boost, options,
true)
}
var err error
terms, err = filterCandidateTerms(indexReader, terms, field)
if err != nil {
return nil, err

// for upside_down
if isIndexed == nil {
terms, err = filterCandidateTerms(indexReader, terms, field)
if err != nil {
return nil, err
}
}

if tooManyClauses(len(terms)) {
return nil, tooManyClausesErr()
return nil, tooManyClausesErr(len(terms))
}

return NewMultiTermSearcherBytes(indexReader, terms, field, boost, options,

@@ -125,11 +154,17 @@ type termRange struct {
endTerm []byte
}

func (t *termRange) Enumerate() [][]byte {
func (t *termRange) Enumerate(filter filterFunc) [][]byte {
var rv [][]byte
next := t.startTerm
for bytes.Compare(next, t.endTerm) <= 0 {
rv = append(rv, next)
if filter != nil {
if filter(next) {
rv = append(rv, next)
}
} else {
rv = append(rv, next)
}
next = incrementBytes(next)
}
return rv

@@ -150,10 +185,10 @@ func incrementBytes(in []byte) []byte {
type termRanges []*termRange

func (tr termRanges) Enumerate() [][]byte {
func (tr termRanges) Enumerate(filter filterFunc) [][]byte {
var rv [][]byte
for _, tri := range tr {
trie := tri.Enumerate()
trie := tri.Enumerate(filter)
rv = append(rv, trie...)
}
return rv
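Enumerate now takes a filterFunc so that terms the field dictionary has never seen are dropped before they become clauses. A standalone sketch of range enumeration with such a predicate; the byte-increment helper mirrors the idea behind incrementBytes, and the "indexed" set here is made up:

package main

import (
	"bytes"
	"fmt"
)

// incrementBytes does a big-endian +1 on a byte string, the same idea the
// range enumeration relies on to step from startTerm to endTerm.
func incrementBytes(in []byte) []byte {
	rv := make([]byte, len(in))
	copy(rv, in)
	for i := len(rv) - 1; i >= 0; i-- {
		rv[i]++
		if rv[i] != 0 {
			break // no carry needed
		}
	}
	return rv
}

// enumerate walks [start, end] and keeps only terms accepted by filter: a
// nil filter keeps everything, a dictionary-backed filter drops terms that
// were never indexed so they never become clauses.
func enumerate(start, end []byte, filter func([]byte) bool) [][]byte {
	var rv [][]byte
	for next := start; bytes.Compare(next, end) <= 0; next = incrementBytes(next) {
		if filter == nil || filter(next) {
			rv = append(rv, next)
		}
	}
	return rv
}

func main() {
	indexed := map[string]bool{"\x01": true, "\x03": true}
	terms := enumerate([]byte{0x00}, []byte{0x04}, func(t []byte) bool {
		return indexed[string(t)]
	})
	fmt.Println(len(terms)) // 2: only the terms the "dictionary" contains
}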
vendor/github.com/blevesearch/bleve/search/searcher/search_phrase.go | 6 (generated, vendored)

@@ -32,7 +32,7 @@ func init() {
}

type PhraseSearcher struct {
mustSearcher *ConjunctionSearcher
mustSearcher search.Searcher
queryNorm float64
currMust *search.DocumentMatch
terms [][]string

@@ -210,7 +210,7 @@ func (s *PhraseSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch,
return nil, nil
}

// checkCurrMustMatch is soley concerned with determining if the DocumentMatch
// checkCurrMustMatch is solely concerned with determining if the DocumentMatch
// pointed to by s.currMust (which satisifies the pre-condition searcher)
// also satisfies the phase constraints. if so, it returns a DocumentMatch
// for this document, otherwise nil

@@ -241,7 +241,7 @@ func (s *PhraseSearcher) checkCurrMustMatch(ctx *search.SearchContext) *search.D
return nil
}

// checkCurrMustMatchField is soley concerned with determining if one
// checkCurrMustMatchField is solely concerned with determining if one
// particular field within the currMust DocumentMatch Locations
// satisfies the phase constraints (possibly more than once). if so,
// the matching field term locations are appended to the provided
vendor/github.com/blevesearch/bleve/search/searcher/search_regexp.go | 83 (generated, vendored)

@@ -21,48 +21,67 @@ import (
"github.com/blevesearch/bleve/search"
)

// NewRegexpStringSearcher is similar to NewRegexpSearcher, but
// additionally optimizes for index readers that handle regexp's.
func NewRegexpStringSearcher(indexReader index.IndexReader, pattern string,
field string, boost float64, options search.SearcherOptions) (
search.Searcher, error) {
ir, ok := indexReader.(index.IndexReaderRegexp)
if !ok {
r, err := regexp.Compile(pattern)
if err != nil {
return nil, err
}

return NewRegexpSearcher(indexReader, r, field, boost, options)
}

fieldDict, err := ir.FieldDictRegexp(field, pattern)
if err != nil {
return nil, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()

var candidateTerms []string

tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
candidateTerms = append(candidateTerms, tfd.Term)
tfd, err = fieldDict.Next()
}
if err != nil {
return nil, err
}

return NewMultiTermSearcher(indexReader, candidateTerms, field, boost,
options, true)
}

// NewRegexpSearcher creates a searcher which will match documents that
// contain terms which match the pattern regexp. The match must be EXACT
// matching the entire term. The provided regexp SHOULD NOT start with ^
// or end with $ as this can intefere with the implementation. Separately,
// matches will be checked to ensure they match the entire term.
func NewRegexpSearcher(indexReader index.IndexReader, pattern *regexp.Regexp,
func NewRegexpSearcher(indexReader index.IndexReader, pattern index.Regexp,
field string, boost float64, options search.SearcherOptions) (
search.Searcher, error) {
var candidateTerms []string
if ir, ok := indexReader.(index.IndexReaderRegexp); ok {
fieldDict, err := ir.FieldDictRegexp(field, []byte(pattern.String()))
if err != nil {
return nil, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()

// enumerate the terms and check against regexp
tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
candidateTerms = append(candidateTerms, tfd.Term)
tfd, err = fieldDict.Next()
}
prefixTerm, complete := pattern.LiteralPrefix()
if complete {
// there is no pattern
candidateTerms = []string{prefixTerm}
} else {
var err error
candidateTerms, err = findRegexpCandidateTerms(indexReader, pattern, field,
prefixTerm)
if err != nil {
return nil, err
}
} else {
prefixTerm, complete := pattern.LiteralPrefix()
if complete {
// there is no pattern
candidateTerms = []string{prefixTerm}
} else {
var err error
candidateTerms, err = findRegexpCandidateTerms(indexReader, pattern, field,
prefixTerm)
if err != nil {
return nil, err
}
}
}

return NewMultiTermSearcher(indexReader, candidateTerms, field, boost,

@@ -70,7 +89,7 @@ func NewRegexpSearcher(indexReader index.IndexReader, pattern *regexp.Regexp,
}

func findRegexpCandidateTerms(indexReader index.IndexReader,
pattern *regexp.Regexp, field, prefixTerm string) (rv []string, err error) {
pattern index.Regexp, field, prefixTerm string) (rv []string, err error) {
rv = make([]string, 0)
var fieldDict index.FieldDict
if len(prefixTerm) > 0 {

@@ -91,7 +110,7 @@ func findRegexpCandidateTerms(indexReader index.IndexReader,
if matchPos != nil && matchPos[0] == 0 && matchPos[1] == len(tfd.Term) {
rv = append(rv, tfd.Term)
if tooManyClauses(len(rv)) {
return rv, tooManyClausesErr()
return rv, tooManyClausesErr(len(rv))
}
}
tfd, err = fieldDict.Next()
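The regexp searcher consults LiteralPrefix before scanning: a complete literal needs no enumeration at all, otherwise the prefix narrows the dictionary walk and each hit must still match the entire term. A standalone sketch against a plain string slice instead of a field dictionary:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	dictionary := []string{"water", "watery", "wine", "whisky"}

	// Ask the compiled pattern for its literal prefix: if the pattern is a
	// complete literal there is nothing to enumerate, otherwise the prefix
	// narrows the scan before the full-match check.
	pattern := regexp.MustCompile(`wat.*`)
	prefix, complete := pattern.LiteralPrefix()

	var candidates []string
	if complete {
		candidates = []string{prefix}
	} else {
		for _, term := range dictionary {
			if len(term) >= len(prefix) && term[:len(prefix)] == prefix {
				// require the match to cover the entire term, as the
				// searcher does with its match position check
				if loc := pattern.FindStringIndex(term); loc != nil &&
					loc[0] == 0 && loc[1] == len(term) {
					candidates = append(candidates, term)
				}
			}
		}
	}
	fmt.Println(prefix, complete, candidates)
}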
vendor/github.com/blevesearch/bleve/search/searcher/search_term.go | 24 (generated, vendored)

@@ -38,28 +38,20 @@ type TermSearcher struct {
}

func NewTermSearcher(indexReader index.IndexReader, term string, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) {
reader, err := indexReader.TermFieldReader([]byte(term), field, true, true, options.IncludeTermVectors)
if err != nil {
return nil, err
}
count, err := indexReader.DocCount()
if err != nil {
_ = reader.Close()
return nil, err
}
scorer := scorer.NewTermQueryScorer([]byte(term), field, boost, count, reader.Count(), options)
return &TermSearcher{
indexReader: indexReader,
reader: reader,
scorer: scorer,
}, nil
return NewTermSearcherBytes(indexReader, []byte(term), field, boost, options)
}

func NewTermSearcherBytes(indexReader index.IndexReader, term []byte, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) {
reader, err := indexReader.TermFieldReader(term, field, true, true, options.IncludeTermVectors)
needFreqNorm := options.Score != "none"
reader, err := indexReader.TermFieldReader(term, field, needFreqNorm, needFreqNorm, options.IncludeTermVectors)
if err != nil {
return nil, err
}
return newTermSearcherFromReader(indexReader, reader, term, field, boost, options)
}

func newTermSearcherFromReader(indexReader index.IndexReader, reader index.TermFieldReader,
term []byte, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) {
count, err := indexReader.DocCount()
if err != nil {
_ = reader.Close()
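NewTermSearcherBytes now only asks the TermFieldReader for frequencies and norms when the query is actually scored (options.Score != "none"). A small sketch of that flag derivation; the helper name and signature here are hypothetical, not part of the vendored API:

package main

import "fmt"

// termReaderFlags mirrors the idea behind the change: frequency and norm
// information is only requested from the index when the query is scored, so
// "score: none" searches read less per posting.
func termReaderFlags(score string, includeTermVectors bool) (freq, norm, vectors bool) {
	needFreqNorm := score != "none"
	return needFreqNorm, needFreqNorm, includeTermVectors
}

func main() {
	fmt.Println(termReaderFlags("default", true)) // true true true
	fmt.Println(termReaderFlags("none", false))   // false false false
}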
vendor/github.com/blevesearch/bleve/search/searcher/search_term_prefix.go | 11 (generated, vendored)

@@ -27,13 +27,24 @@ func NewTermPrefixSearcher(indexReader index.IndexReader, prefix string,
if err != nil {
return nil, err
}
defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()

var terms []string
tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
terms = append(terms, tfd.Term)
if tooManyClauses(len(terms)) {
return nil, tooManyClausesErr(len(terms))
}
tfd, err = fieldDict.Next()
}
if err != nil {
return nil, err
}

return NewMultiTermSearcher(indexReader, terms, field, boost, options, true)
}
vendor/github.com/blevesearch/bleve/search/searcher/search_term_range.go | 6 (generated, vendored)

@@ -48,6 +48,12 @@ func NewTermRangeSearcher(indexReader index.IndexReader,
return nil, err
}

defer func() {
if cerr := fieldDict.Close(); cerr != nil && err == nil {
err = cerr
}
}()

var terms []string
tfd, err := fieldDict.Next()
for err == nil && tfd != nil {
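Several of these hunks wrap the field dictionary in the same deferred-Close pattern: the Close error is surfaced only when no earlier error is already being returned, so it never masks one. A standalone sketch with a toy dictionary type:

package main

import (
	"errors"
	"fmt"
)

type dict struct{ failClose bool }

func (d *dict) Close() error {
	if d.failClose {
		return errors.New("close failed")
	}
	return nil
}

// readTerms shows the deferred-Close pattern: the named return value err is
// only overwritten when the function would otherwise return successfully.
func readTerms(d *dict) (terms []string, err error) {
	defer func() {
		if cerr := d.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()
	terms = append(terms, "a", "b")
	return terms, nil
}

func main() {
	terms, err := readTerms(&dict{failClose: true})
	fmt.Println(terms, err) // [a b] close failed
}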