Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2024-10-31 22:58:59 +01:00)
Upgrade blevesearch dependency to v2.0.1 (#14346)
* Upgrade blevesearch dependency to v2.0.1
* Update rupture to v1.0.0
* Fix test
parent 3aa53dc6bc
commit f5abe2f563
459 changed files with 7518 additions and 4211 deletions
go.mod (8 changed lines)

@@ -23,7 +23,7 @@ require (
 	github.com/alecthomas/chroma v0.8.2
 	github.com/andybalholm/brotli v1.0.1 // indirect
 	github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
-	github.com/blevesearch/bleve v1.0.14
+	github.com/blevesearch/bleve/v2 v2.0.1
 	github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
 	github.com/denisenkom/go-mssqldb v0.9.0
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -31,7 +31,7 @@ require (
 	github.com/dustin/go-humanize v1.0.0
 	github.com/editorconfig/editorconfig-core-go/v2 v2.3.9
 	github.com/emirpasic/gods v1.12.0
-	github.com/ethantkoenig/rupture v0.0.0-20181029165146-c3b3b810dc77
+	github.com/ethantkoenig/rupture v1.0.0
 	github.com/gliderlabs/ssh v0.3.1
 	github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a // indirect
 	github.com/go-chi/chi v1.5.1
@@ -90,7 +90,7 @@ require (
 	github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546
 	github.com/spf13/viper v1.7.1 // indirect
 	github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
-	github.com/stretchr/testify v1.6.1
+	github.com/stretchr/testify v1.7.0
 	github.com/syndtr/goleveldb v1.0.0
 	github.com/tinylib/msgp v1.1.5 // indirect
 	github.com/tstranex/u2f v1.0.0
@@ -111,7 +111,7 @@ require (
 	golang.org/x/crypto v0.0.0-20201217014255-9d1352758620
 	golang.org/x/net v0.0.0-20201031054903-ff519b6c9102
 	golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43
-	golang.org/x/sys v0.0.0-20201211090839-8ad439b19e0f
+	golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78
 	golang.org/x/text v0.3.4
 	golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect
 	golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9
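Because Go uses semantic import versioning, moving bleve from v1.0.14 to v2.0.1 is not just a version bump in go.mod: the module path itself gains a /v2 element, so every import of the library and of its sub-packages has to be rewritten, which is what the source and vendor hunks below do. A minimal sketch of the rewrite, with a hypothetical package name; note that the package is still referred to as bleve in code:

// Old (bleve v1.x) import path:  "github.com/blevesearch/bleve"
// New (bleve v2.x) import path:  "github.com/blevesearch/bleve/v2"
package indexer // hypothetical package name, for illustration only

import (
	"github.com/blevesearch/bleve/v2"         // the package identifier in code is still "bleve"
	"github.com/blevesearch/bleve/v2/mapping" // sub-packages move under /v2 as well
)

// newMapping only shows that the API is still addressed through the "bleve" identifier.
func newMapping() mapping.IndexMapping {
	return bleve.NewIndexMapping()
}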
go.sum (65 changed lines)

@@ -164,29 +164,32 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
 github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
-github.com/blevesearch/bleve v1.0.14 h1:Q8r+fHTt35jtGXJUM0ULwM3Tzg+MRfyai4ZkWDy2xO4=
-github.com/blevesearch/bleve v1.0.14/go.mod h1:e/LJTr+E7EaoVdkQZTfoz7dt4KoDNvDbLb8MSKuNTLQ=
-github.com/blevesearch/blevex v1.0.0 h1:pnilj2Qi3YSEGdWgLj1Pn9Io7ukfXPoQcpAI1Bv8n/o=
-github.com/blevesearch/blevex v1.0.0/go.mod h1:2rNVqoG2BZI8t1/P1awgTKnGlx5MP9ZbtEciQaNhswc=
-github.com/blevesearch/cld2 v0.0.0-20200327141045-8b5f551d37f5/go.mod h1:PN0QNTLs9+j1bKy3d/GB/59wsNBFC4sWLWG3k69lWbc=
+github.com/blevesearch/bleve/v2 v2.0.1 h1:v1eV5K+/lndsjnykeVcuU9J4cJnjKLUKSwxXFxZsLuY=
+github.com/blevesearch/bleve/v2 v2.0.1/go.mod h1:OBP2Pktqik8vEiUlGhuWjYx7KiO4zD542+DHqICwM5w=
+github.com/blevesearch/bleve_index_api v1.0.0 h1:Ds3XeuTxjXCkG6pgIwWDRyooJKNIuOKemnN0N0IkhTU=
+github.com/blevesearch/bleve_index_api v1.0.0/go.mod h1:fiwKS0xLEm+gBRgv5mumf0dhgFr2mDgZah1pqv1c1M4=
 github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
 github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
 github.com/blevesearch/mmap-go v1.0.2 h1:JtMHb+FgQCTTYIhtMvimw15dJwu1Y5lrZDMOFXVWPk0=
 github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA=
+github.com/blevesearch/scorch_segment_api v1.0.0 h1:BUkCPWDg2gimTEyVDXf85I2buqqt4lh28uaVMiJsIYk=
+github.com/blevesearch/scorch_segment_api v1.0.0/go.mod h1:KgRYmlfYC27NeM6cXOHx8LBgq7jn0atpV8mVWoBKBng=
 github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac=
 github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
 github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
 github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
-github.com/blevesearch/zap/v11 v11.0.14 h1:IrDAvtlzDylh6H2QCmS0OGcN9Hpf6mISJlfKjcwJs7k=
-github.com/blevesearch/zap/v11 v11.0.14/go.mod h1:MUEZh6VHGXv1PKx3WnCbdP404LGG2IZVa/L66pyFwnY=
-github.com/blevesearch/zap/v12 v12.0.14 h1:2o9iRtl1xaRjsJ1xcqTyLX414qPAwykHNV7wNVmbp3w=
-github.com/blevesearch/zap/v12 v12.0.14/go.mod h1:rOnuZOiMKPQj18AEKEHJxuI14236tTQ1ZJz4PAnWlUg=
-github.com/blevesearch/zap/v13 v13.0.6 h1:r+VNSVImi9cBhTNNR+Kfl5uiGy8kIbb0JMz/h8r6+O4=
-github.com/blevesearch/zap/v13 v13.0.6/go.mod h1:L89gsjdRKGyGrRN6nCpIScCvvkyxvmeDCwZRcjjPCrw=
-github.com/blevesearch/zap/v14 v14.0.5 h1:NdcT+81Nvmp2zL+NhwSvGSLh7xNgGL8QRVZ67njR0NU=
-github.com/blevesearch/zap/v14 v14.0.5/go.mod h1:bWe8S7tRrSBTIaZ6cLRbgNH4TUDaC9LZSpRGs85AsGY=
-github.com/blevesearch/zap/v15 v15.0.3 h1:Ylj8Oe+mo0P25tr9iLPp33lN6d4qcztGjaIsP51UxaY=
-github.com/blevesearch/zap/v15 v15.0.3/go.mod h1:iuwQrImsh1WjWJ0Ue2kBqY83a0rFtJTqfa9fp1rbVVU=
+github.com/blevesearch/upsidedown_store_api v1.0.1 h1:1SYRwyoFLwG3sj0ed89RLtM15amfX2pXlYbFOnF8zNU=
+github.com/blevesearch/upsidedown_store_api v1.0.1/go.mod h1:MQDVGpHZrpe3Uy26zJBf/a8h0FZY6xJbthIMm8myH2Q=
+github.com/blevesearch/zapx/v11 v11.1.10 h1:8Eo3rXiHsVSP9Sk+4StrrwLrj9vyulhMVPmxTf8ZuDg=
+github.com/blevesearch/zapx/v11 v11.1.10/go.mod h1:DTjbcBqrr/Uo82UBilDC8lEew42gN/OcIyiTNFtSijc=
+github.com/blevesearch/zapx/v12 v12.1.10 h1:sqR+/0Z4dSTovApRqLA1HnilMtQer7a4UvPrNmPzlTM=
+github.com/blevesearch/zapx/v12 v12.1.10/go.mod h1:14NmKnPrnKAIyiEJM566k/Jk+FQpuiflT5d3uaaK3MI=
+github.com/blevesearch/zapx/v13 v13.1.10 h1:zCneEVRJDXwtDfSwh+33Dxguliv192vCK283zdGH4Sw=
+github.com/blevesearch/zapx/v13 v13.1.10/go.mod h1:YsVY6YGpTEAlJOMjdL7EsdBLvjWd8kPa2gwJDNpqLJo=
+github.com/blevesearch/zapx/v14 v14.1.10 h1:nD0vw2jxKogJFfA5WyoS4wNwZlVby3Aq8aW7CZi6YIw=
+github.com/blevesearch/zapx/v14 v14.1.10/go.mod h1:hsULl5eJSxs5NEfBsmeT9qrqdCP+/ecpVZKt60M4V64=
+github.com/blevesearch/zapx/v15 v15.1.10 h1:kZR3b9jO9l6s2B5UHI+1N1llLzJ4nYikkXQTMrDl1vQ=
+github.com/blevesearch/zapx/v15 v15.1.10/go.mod h1:4ypq25bwtSQKzwEF1UERyIhmGTbMT3brY/n4NC5gRnM=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
 github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668 h1:U/lr3Dgy4WK+hNk4tyD+nuGjpVLPEHuJSFXMw11/HPA=
@@ -247,12 +250,6 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsr
 github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY=
-github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d h1:SwD98825d6bdB+pEuTxWOXiSjBrHdOl/UVp75eI7JT8=
-github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d/go.mod h1:URriBxXwVq5ijiJ12C7iIZqlA69nTlI+LgI6/pwftG8=
-github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 h1:iwZdTE0PVqJCos1vaoKsclOGD3ADKpshg3SRtYBbwso=
-github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM=
-github.com/cznic/strutil v0.0.0-20181122101858-275e90344537 h1:MZRmHqDBd0vxNwenEbKSQqRVT24d3C05ft8kduSwlqM=
-github.com/cznic/strutil v0.0.0-20181122101858-275e90344537/go.mod h1:AHHPPPXTw0h6pVabbcbyGRK1DckRn7r/STdZEeIDzZc=
 github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
@@ -297,14 +294,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/ethantkoenig/rupture v0.0.0-20181029165146-c3b3b810dc77 h1:ZLWiTTzTUBb0WEXUxobYI/RxULIzOoIP7pgfDd4p1cw=
-github.com/ethantkoenig/rupture v0.0.0-20181029165146-c3b3b810dc77/go.mod h1:MkKY/CB98aVE4VxO63X5vTQKUgcn+3XP15LMASe3lYs=
-github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c h1:8ISkoahWXwZR41ois5lSJBSVw4D0OV19Ht/JSTzvSv0=
-github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c/go.mod h1:Yg+htXGokKKdzcwhuNDwVvN+uBxDGXJ7G/VN1d8fa64=
-github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojtoVVWjGfOF9635RETekkoH6Cc9SX0A=
-github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg=
-github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4 h1:7HZCaLC5+BZpmbhCOZJ293Lz68O7PYrF2EzeiFMwCLk=
-github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0=
+github.com/ethantkoenig/rupture v1.0.0 h1:gPInt1N30UErGNzd8t5js5Qbnpjcd1l6yU2MCrJxIe8=
+github.com/ethantkoenig/rupture v1.0.0/go.mod h1:GyE9QabHfxA6ch0NZgwsHopRbOLcYjUr9g4FTJmq0WM=
 github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
 github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
 github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
@@ -326,6 +317,7 @@ github.com/gliderlabs/ssh v0.3.1 h1:L6VrMUGZaMlNIMN8Hj+CHh4U9yodJE3FAt/rgvfaKvE=
 github.com/gliderlabs/ssh v0.3.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
 github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
 github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
+github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2 h1:Ujru1hufTHVb++eG6OuNDKMxZnGIvF6o/u8q/8h2+I4=
 github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
 github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a h1:FQqoVvjbiUioBBFUL5up+h+GdCa/AnJsL/1bIs/veSI=
 github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
@@ -618,7 +610,6 @@ github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw
 github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
 github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=
 github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
-github.com/ikawaha/kagome.ipadic v1.1.2/go.mod h1:DPSBbU0czaJhAb/5uKQZHMc9MTVRpDugJfX+HddPHHg=
 github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=
 github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
 github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
@@ -680,8 +671,6 @@ github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGAR
 github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
-github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U=
-github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ=
 github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
@@ -970,8 +959,6 @@ github.com/quasoft/websspi v1.0.0 h1:5nDgdM5xSur9s+B5w2xQ5kxf5nUGqgFgU4W0aDLZ8Mw
 github.com/quasoft/websspi v1.0.0/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk=
 github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
 github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
-github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
 github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
 github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@@ -1063,15 +1050,15 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
 github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
 github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
 github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
-github.com/tebeka/snowball v0.4.2/go.mod h1:4IfL14h1lvwZcp1sfXuuc7/7yCsvVffTWxWxCLfFpYg=
-github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c h1:g+WoO5jjkqGAzHWCjJB1zZfXPIAaDpzXIEJ0eS6B5Ok=
-github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c/go.mod h1:ahpPrc7HpcfEWDQRZEmnXMzHY03mLDYMCxeDzy46i+8=
 github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
 github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/tinylib/msgp v1.1.0 h1:9fQd+ICuRIu/ue4vxJZu6/LzxN0HwMds2nq/0cFvxHU=
 github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
 github.com/tinylib/msgp v1.1.5 h1:2gXmtWueD2HefZHQe1QOy9HVzmFrLOVvsXwXBQ0ayy0=
 github.com/tinylib/msgp v1.1.5/go.mod h1:eQsjooMTnV42mHu917E26IogZ2930nFyBQdofk10Udg=
@@ -1362,8 +1349,8 @@ golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201015000850-e3ed0017c211 h1:9UQO31fZ+0aKQOFldThf7BKPMJTiBfWycGh/u3UoO88=
 golang.org/x/sys v0.0.0-20201015000850-e3ed0017c211/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201211090839-8ad439b19e0f h1:QdHQnPce6K4XQewki9WNbG5KOROuDzqO3NaYjI1cXJ0=
-golang.org/x/sys v0.0.0-20201211090839-8ad439b19e0f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78 h1:nVuTkr9L6Bq62qpUqKo/RnZCFfzDBL0bYo6w9OJUqZY=
+golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
 golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -21,15 +21,15 @@ import (
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/util"

-	"github.com/blevesearch/bleve"
-	analyzer_custom "github.com/blevesearch/bleve/analysis/analyzer/custom"
-	analyzer_keyword "github.com/blevesearch/bleve/analysis/analyzer/keyword"
-	"github.com/blevesearch/bleve/analysis/token/lowercase"
-	"github.com/blevesearch/bleve/analysis/token/unicodenorm"
-	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
-	"github.com/blevesearch/bleve/index/upsidedown"
-	"github.com/blevesearch/bleve/mapping"
-	"github.com/blevesearch/bleve/search/query"
+	"github.com/blevesearch/bleve/v2"
+	analyzer_custom "github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
+	analyzer_keyword "github.com/blevesearch/bleve/v2/analysis/analyzer/keyword"
+	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+	"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+	"github.com/blevesearch/bleve/v2/index/upsidedown"
+	"github.com/blevesearch/bleve/v2/mapping"
+	"github.com/blevesearch/bleve/v2/search/query"
 	"github.com/ethantkoenig/rupture"
 	"github.com/go-enry/go-enry/v2"
 )
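The hunk above only rewrites import paths; the aliased analyzer, tokenizer and token-filter packages are then wired into an index mapping. As a rough, hedged sketch (not Gitea's actual indexer code, and with an invented analyzer name), the v2 packages are typically combined like this:

package indexer // hypothetical package name, for illustration only

import (
	"github.com/blevesearch/bleve/v2"
	analyzer_custom "github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
	"github.com/blevesearch/bleve/v2/mapping"
)

// newIndexMapping registers a custom analyzer (unicode tokenizer + lowercase filter)
// and makes it the default analyzer of the mapping.
func newIndexMapping() (mapping.IndexMapping, error) {
	m := bleve.NewIndexMapping()
	if err := m.AddCustomAnalyzer("example_analyzer", map[string]interface{}{
		"type":          analyzer_custom.Name,
		"tokenizer":     unicode.Name,
		"token_filters": []string{lowercase.Name},
	}); err != nil {
		return nil, err
	}
	m.DefaultAnalyzer = "example_analyzer"
	return m, nil
}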
@@ -11,14 +11,14 @@ import (

 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/util"
-	"github.com/blevesearch/bleve"
-	"github.com/blevesearch/bleve/analysis/analyzer/custom"
-	"github.com/blevesearch/bleve/analysis/token/lowercase"
-	"github.com/blevesearch/bleve/analysis/token/unicodenorm"
-	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
-	"github.com/blevesearch/bleve/index/upsidedown"
-	"github.com/blevesearch/bleve/mapping"
-	"github.com/blevesearch/bleve/search/query"
+	"github.com/blevesearch/bleve/v2"
+	"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
+	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+	"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+	"github.com/blevesearch/bleve/v2/index/upsidedown"
+	"github.com/blevesearch/bleve/v2/mapping"
+	"github.com/blevesearch/bleve/v2/search/query"
 	"github.com/ethantkoenig/rupture"
 )

@@ -92,6 +92,6 @@ func TestBleveIndexAndSearch(t *testing.T) {
 		for _, hit := range res.Hits {
 			ids = append(ids, hit.ID)
 		}
-		assert.EqualValues(t, kw.IDs, ids)
+		assert.ElementsMatch(t, kw.IDs, ids)
 	}
 }
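The test fix above replaces assert.EqualValues with assert.ElementsMatch, presumably because bleve v2 no longer returns hits in the exact order the old assertion relied on: ElementsMatch only requires both slices to contain the same elements, ignoring order. A small self-contained illustration with made-up IDs:

package indexer // hypothetical package name, for illustration only

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestHitOrderDoesNotMatter(t *testing.T) {
	expected := []string{"1", "2", "3"}
	got := []string{"3", "1", "2"} // same IDs, different order

	// A strict, ordered comparison fails on the reordered slice...
	assert.NotEqual(t, expected, got)

	// ...while ElementsMatch accepts any ordering of the same elements.
	assert.ElementsMatch(t, expected, got)
}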
vendor/github.com/blevesearch/bleve/index/analysis.go (generated, vendored; file deleted, 110 lines)

@@ -1,110 +0,0 @@
|
||||||
// Copyright (c) 2015 Couchbase, Inc.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package index
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/analysis"
|
|
||||||
"github.com/blevesearch/bleve/document"
|
|
||||||
"github.com/blevesearch/bleve/size"
|
|
||||||
)
|
|
||||||
|
|
||||||
var reflectStaticSizeAnalysisResult int
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
var ar AnalysisResult
|
|
||||||
reflectStaticSizeAnalysisResult = int(reflect.TypeOf(ar).Size())
|
|
||||||
}
|
|
||||||
|
|
||||||
type IndexRow interface {
|
|
||||||
KeySize() int
|
|
||||||
KeyTo([]byte) (int, error)
|
|
||||||
Key() []byte
|
|
||||||
|
|
||||||
ValueSize() int
|
|
||||||
ValueTo([]byte) (int, error)
|
|
||||||
Value() []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
type AnalysisResult struct {
|
|
||||||
DocID string
|
|
||||||
Rows []IndexRow
|
|
||||||
|
|
||||||
// scorch
|
|
||||||
Document *document.Document
|
|
||||||
Analyzed []analysis.TokenFrequencies
|
|
||||||
Length []int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *AnalysisResult) Size() int {
|
|
||||||
rv := reflectStaticSizeAnalysisResult
|
|
||||||
for _, analyzedI := range a.Analyzed {
|
|
||||||
rv += analyzedI.Size()
|
|
||||||
}
|
|
||||||
rv += len(a.Length) * size.SizeOfInt
|
|
||||||
return rv
|
|
||||||
}
|
|
||||||
|
|
||||||
type AnalysisWork struct {
|
|
||||||
i Index
|
|
||||||
d *document.Document
|
|
||||||
rc chan *AnalysisResult
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewAnalysisWork(i Index, d *document.Document, rc chan *AnalysisResult) *AnalysisWork {
|
|
||||||
return &AnalysisWork{
|
|
||||||
i: i,
|
|
||||||
d: d,
|
|
||||||
rc: rc,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type AnalysisQueue struct {
|
|
||||||
queue chan *AnalysisWork
|
|
||||||
done chan struct{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (q *AnalysisQueue) Queue(work *AnalysisWork) {
|
|
||||||
q.queue <- work
|
|
||||||
}
|
|
||||||
|
|
||||||
func (q *AnalysisQueue) Close() {
|
|
||||||
close(q.done)
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewAnalysisQueue(numWorkers int) *AnalysisQueue {
|
|
||||||
rv := AnalysisQueue{
|
|
||||||
queue: make(chan *AnalysisWork),
|
|
||||||
done: make(chan struct{}),
|
|
||||||
}
|
|
||||||
for i := 0; i < numWorkers; i++ {
|
|
||||||
go AnalysisWorker(rv)
|
|
||||||
}
|
|
||||||
return &rv
|
|
||||||
}
|
|
||||||
|
|
||||||
func AnalysisWorker(q AnalysisQueue) {
|
|
||||||
// read work off the queue
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-q.done:
|
|
||||||
return
|
|
||||||
case w := <-q.queue:
|
|
||||||
r := w.i.Analyze(w.d)
|
|
||||||
w.rc <- r
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
vendor/github.com/blevesearch/bleve/index/scorch/segment/empty.go (generated, vendored; file deleted, 137 lines)

@@ -1,137 +0,0 @@
|
||||||
// Copyright (c) 2017 Couchbase, Inc.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package segment
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
|
||||||
"github.com/blevesearch/bleve/index"
|
|
||||||
"github.com/couchbase/vellum"
|
|
||||||
)
|
|
||||||
|
|
||||||
type EmptySegment struct{}
|
|
||||||
|
|
||||||
func (e *EmptySegment) Dictionary(field string) (TermDictionary, error) {
|
|
||||||
return &EmptyDictionary{}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) VisitDocument(num uint64, visitor DocumentFieldValueVisitor) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) DocID(num uint64) ([]byte, error) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) Count() uint64 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) DocNumbers([]string) (*roaring.Bitmap, error) {
|
|
||||||
r := roaring.NewBitmap()
|
|
||||||
return r, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) Fields() []string {
|
|
||||||
return []string{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) Close() error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) Size() uint64 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) AddRef() {
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptySegment) DecRef() error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type EmptyDictionary struct{}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) PostingsList(term []byte,
|
|
||||||
except *roaring.Bitmap, prealloc PostingsList) (PostingsList, error) {
|
|
||||||
return &EmptyPostingsList{}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) Iterator() DictionaryIterator {
|
|
||||||
return &EmptyDictionaryIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) PrefixIterator(prefix string) DictionaryIterator {
|
|
||||||
return &EmptyDictionaryIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) RangeIterator(start, end string) DictionaryIterator {
|
|
||||||
return &EmptyDictionaryIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) AutomatonIterator(a vellum.Automaton,
|
|
||||||
startKeyInclusive, endKeyExclusive []byte) DictionaryIterator {
|
|
||||||
return &EmptyDictionaryIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) OnlyIterator(onlyTerms [][]byte,
|
|
||||||
includeCount bool) DictionaryIterator {
|
|
||||||
return &EmptyDictionaryIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionary) Contains(key []byte) (bool, error) {
|
|
||||||
return false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type EmptyDictionaryIterator struct{}
|
|
||||||
|
|
||||||
func (e *EmptyDictionaryIterator) Next() (*index.DictEntry, error) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyDictionaryIterator) Contains(key []byte) (bool, error) {
|
|
||||||
return false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type EmptyPostingsList struct{}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsList) Iterator(includeFreq, includeNorm, includeLocations bool,
|
|
||||||
prealloc PostingsIterator) PostingsIterator {
|
|
||||||
return &EmptyPostingsIterator{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsList) Size() int {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsList) Count() uint64 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
type EmptyPostingsIterator struct{}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsIterator) Next() (Posting, error) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsIterator) Advance(uint64) (Posting, error) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EmptyPostingsIterator) Size() int {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
var AnEmptyPostingsIterator = &EmptyPostingsIterator{}
|
|
vendor/github.com/blevesearch/bleve/index/scorch/segment/int.go (generated, vendored; file deleted, 176 lines)

@@ -1,176 +0,0 @@
|
||||||
// Copyright 2014 The Cockroach Authors.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
// implied. See the License for the specific language governing
|
|
||||||
// permissions and limitations under the License.
|
|
||||||
|
|
||||||
// This code originated from:
|
|
||||||
// https://github.com/cockroachdb/cockroach/blob/2dd65dde5d90c157f4b93f92502ca1063b904e1d/pkg/util/encoding/encoding.go
|
|
||||||
|
|
||||||
// Modified to not use pkg/errors
|
|
||||||
|
|
||||||
package segment
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
MaxVarintSize = 9
|
|
||||||
|
|
||||||
// IntMin is chosen such that the range of int tags does not overlap the
|
|
||||||
// ascii character set that is frequently used in testing.
|
|
||||||
IntMin = 0x80 // 128
|
|
||||||
intMaxWidth = 8
|
|
||||||
intZero = IntMin + intMaxWidth // 136
|
|
||||||
intSmall = IntMax - intZero - intMaxWidth // 109
|
|
||||||
// IntMax is the maximum int tag value.
|
|
||||||
IntMax = 0xfd // 253
|
|
||||||
)
|
|
||||||
|
|
||||||
// EncodeUvarintAscending encodes the uint64 value using a variable length
|
|
||||||
// (length-prefixed) representation. The length is encoded as a single
|
|
||||||
// byte indicating the number of encoded bytes (-8) to follow. See
|
|
||||||
// EncodeVarintAscending for rationale. The encoded bytes are appended to the
|
|
||||||
// supplied buffer and the final buffer is returned.
|
|
||||||
func EncodeUvarintAscending(b []byte, v uint64) []byte {
|
|
||||||
switch {
|
|
||||||
case v <= intSmall:
|
|
||||||
return append(b, intZero+byte(v))
|
|
||||||
case v <= 0xff:
|
|
||||||
return append(b, IntMax-7, byte(v))
|
|
||||||
case v <= 0xffff:
|
|
||||||
return append(b, IntMax-6, byte(v>>8), byte(v))
|
|
||||||
case v <= 0xffffff:
|
|
||||||
return append(b, IntMax-5, byte(v>>16), byte(v>>8), byte(v))
|
|
||||||
case v <= 0xffffffff:
|
|
||||||
return append(b, IntMax-4, byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
|
|
||||||
case v <= 0xffffffffff:
|
|
||||||
return append(b, IntMax-3, byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8),
|
|
||||||
byte(v))
|
|
||||||
case v <= 0xffffffffffff:
|
|
||||||
return append(b, IntMax-2, byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16),
|
|
||||||
byte(v>>8), byte(v))
|
|
||||||
case v <= 0xffffffffffffff:
|
|
||||||
return append(b, IntMax-1, byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24),
|
|
||||||
byte(v>>16), byte(v>>8), byte(v))
|
|
||||||
default:
|
|
||||||
return append(b, IntMax, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32),
|
|
||||||
byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeUvarintAscending decodes a varint encoded uint64 from the input
|
|
||||||
// buffer. The remainder of the input buffer and the decoded uint64
|
|
||||||
// are returned.
|
|
||||||
func DecodeUvarintAscending(b []byte) ([]byte, uint64, error) {
|
|
||||||
if len(b) == 0 {
|
|
||||||
return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value")
|
|
||||||
}
|
|
||||||
length := int(b[0]) - intZero
|
|
||||||
b = b[1:] // skip length byte
|
|
||||||
if length <= intSmall {
|
|
||||||
return b, uint64(length), nil
|
|
||||||
}
|
|
||||||
length -= intSmall
|
|
||||||
if length < 0 || length > 8 {
|
|
||||||
return nil, 0, fmt.Errorf("invalid uvarint length of %d", length)
|
|
||||||
} else if len(b) < length {
|
|
||||||
return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value: %q", b)
|
|
||||||
}
|
|
||||||
var v uint64
|
|
||||||
// It is faster to range over the elements in a slice than to index
|
|
||||||
// into the slice on each loop iteration.
|
|
||||||
for _, t := range b[:length] {
|
|
||||||
v = (v << 8) | uint64(t)
|
|
||||||
}
|
|
||||||
return b[length:], v, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ------------------------------------------------------------
|
|
||||||
|
|
||||||
type MemUvarintReader struct {
|
|
||||||
C int // index of next byte to read from S
|
|
||||||
S []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewMemUvarintReader(s []byte) *MemUvarintReader {
|
|
||||||
return &MemUvarintReader{S: s}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Len returns the number of unread bytes.
|
|
||||||
func (r *MemUvarintReader) Len() int {
|
|
||||||
n := len(r.S) - r.C
|
|
||||||
if n < 0 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
|
|
||||||
var ErrMemUvarintReaderOverflow = errors.New("MemUvarintReader overflow")
|
|
||||||
|
|
||||||
// ReadUvarint reads an encoded uint64. The original code this was
|
|
||||||
// based on is at encoding/binary/ReadUvarint().
|
|
||||||
func (r *MemUvarintReader) ReadUvarint() (uint64, error) {
|
|
||||||
var x uint64
|
|
||||||
var s uint
|
|
||||||
var C = r.C
|
|
||||||
var S = r.S
|
|
||||||
|
|
||||||
for {
|
|
||||||
b := S[C]
|
|
||||||
C++
|
|
||||||
|
|
||||||
if b < 0x80 {
|
|
||||||
r.C = C
|
|
||||||
|
|
||||||
// why 63? The original code had an 'i += 1' loop var and
|
|
||||||
// checked for i > 9 || i == 9 ...; but, we no longer
|
|
||||||
// check for the i var, but instead check here for s,
|
|
||||||
// which is incremented by 7. So, 7*9 == 63.
|
|
||||||
//
|
|
||||||
// why the "extra" >= check? The normal case is that s <
|
|
||||||
// 63, so we check this single >= guard first so that we
|
|
||||||
// hit the normal, nil-error return pathway sooner.
|
|
||||||
if s >= 63 && (s > 63 || s == 63 && b > 1) {
|
|
||||||
return 0, ErrMemUvarintReaderOverflow
|
|
||||||
}
|
|
||||||
|
|
||||||
return x | uint64(b)<<s, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
x |= uint64(b&0x7f) << s
|
|
||||||
s += 7
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SkipUvarint skips ahead one encoded uint64.
|
|
||||||
func (r *MemUvarintReader) SkipUvarint() {
|
|
||||||
for {
|
|
||||||
b := r.S[r.C]
|
|
||||||
r.C++
|
|
||||||
|
|
||||||
if b < 0x80 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SkipBytes skips a count number of bytes.
|
|
||||||
func (r *MemUvarintReader) SkipBytes(count int) {
|
|
||||||
r.C = r.C + count
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *MemUvarintReader) Reset(s []byte) {
|
|
||||||
r.C = 0
|
|
||||||
r.S = s
|
|
||||||
}
|
|
vendor/github.com/blevesearch/bleve/index/scorch/segment/plugin.go (generated, vendored; file deleted, 58 lines)

@@ -1,58 +0,0 @@
|
||||||
// Copyright (c) 2020 Couchbase, Inc.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package segment
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
|
||||||
"github.com/blevesearch/bleve/index"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Plugin represents the essential functions required by a package to plug in
|
|
||||||
// it's segment implementation
|
|
||||||
type Plugin interface {
|
|
||||||
|
|
||||||
// Type is the name for this segment plugin
|
|
||||||
Type() string
|
|
||||||
|
|
||||||
// Version is a numeric value identifying a specific version of this type.
|
|
||||||
// When incompatible changes are made to a particular type of plugin, the
|
|
||||||
// version must be incremented.
|
|
||||||
Version() uint32
|
|
||||||
|
|
||||||
// New takes a set of AnalysisResults and turns them into a new Segment
|
|
||||||
New(results []*index.AnalysisResult) (Segment, uint64, error)
|
|
||||||
|
|
||||||
// Open attempts to open the file at the specified path and
|
|
||||||
// return the corresponding Segment
|
|
||||||
Open(path string) (Segment, error)
|
|
||||||
|
|
||||||
// Merge takes a set of Segments, and creates a new segment on disk at
|
|
||||||
// the specified path.
|
|
||||||
// Drops is a set of bitmaps (one for each segment) indicating which
|
|
||||||
// documents can be dropped from the segments during the merge.
|
|
||||||
// If the closeCh channel is closed, Merge will cease doing work at
|
|
||||||
// the next opportunity, and return an error (closed).
|
|
||||||
// StatsReporter can optionally be provided, in which case progress
|
|
||||||
// made during the merge is reported while operation continues.
|
|
||||||
// Returns:
|
|
||||||
// A slice of new document numbers (one for each input segment),
|
|
||||||
// this allows the caller to know a particular document's new
|
|
||||||
// document number in the newly merged segment.
|
|
||||||
// The number of bytes written to the new segment file.
|
|
||||||
// An error, if any occurred.
|
|
||||||
Merge(segments []Segment, drops []*roaring.Bitmap, path string,
|
|
||||||
closeCh chan struct{}, s StatsReporter) (
|
|
||||||
[][]uint64, uint64, error)
|
|
||||||
}
|
|
vendor/github.com/blevesearch/bleve/index/scorch/segment_plugin.go (generated, vendored; file deleted, 93 lines)

@@ -1,93 +0,0 @@
|
||||||
// Copyright (c) 2019 Couchbase, Inc.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package scorch
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
|
||||||
|
|
||||||
zapv11 "github.com/blevesearch/zap/v11"
|
|
||||||
zapv12 "github.com/blevesearch/zap/v12"
|
|
||||||
zapv13 "github.com/blevesearch/zap/v13"
|
|
||||||
zapv14 "github.com/blevesearch/zap/v14"
|
|
||||||
zapv15 "github.com/blevesearch/zap/v15"
|
|
||||||
)
|
|
||||||
|
|
||||||
var supportedSegmentPlugins map[string]map[uint32]segment.Plugin
|
|
||||||
var defaultSegmentPlugin segment.Plugin
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
ResetPlugins()
|
|
||||||
RegisterPlugin(zapv15.Plugin(), false)
|
|
||||||
RegisterPlugin(zapv14.Plugin(), false)
|
|
||||||
RegisterPlugin(zapv13.Plugin(), false)
|
|
||||||
RegisterPlugin(zapv12.Plugin(), false)
|
|
||||||
RegisterPlugin(zapv11.Plugin(), true)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ResetPlugins() {
|
|
||||||
supportedSegmentPlugins = map[string]map[uint32]segment.Plugin{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func RegisterPlugin(plugin segment.Plugin, makeDefault bool) {
|
|
||||||
if _, ok := supportedSegmentPlugins[plugin.Type()]; !ok {
|
|
||||||
supportedSegmentPlugins[plugin.Type()] = map[uint32]segment.Plugin{}
|
|
||||||
}
|
|
||||||
supportedSegmentPlugins[plugin.Type()][plugin.Version()] = plugin
|
|
||||||
if makeDefault {
|
|
||||||
defaultSegmentPlugin = plugin
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func SupportedSegmentTypes() (rv []string) {
|
|
||||||
for k := range supportedSegmentPlugins {
|
|
||||||
rv = append(rv, k)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func SupportedSegmentTypeVersions(typ string) (rv []uint32) {
|
|
||||||
for k := range supportedSegmentPlugins[typ] {
|
|
||||||
rv = append(rv, k)
|
|
||||||
}
|
|
||||||
return rv
|
|
||||||
}
|
|
||||||
|
|
||||||
func chooseSegmentPlugin(forcedSegmentType string,
|
|
||||||
forcedSegmentVersion uint32) (segment.Plugin, error) {
|
|
||||||
if versions, ok := supportedSegmentPlugins[forcedSegmentType]; ok {
|
|
||||||
if segPlugin, ok := versions[uint32(forcedSegmentVersion)]; ok {
|
|
||||||
return segPlugin, nil
|
|
||||||
}
|
|
||||||
return nil, fmt.Errorf(
|
|
||||||
"unsupported version %d for segment type: %s, supported: %v",
|
|
||||||
forcedSegmentVersion, forcedSegmentType,
|
|
||||||
SupportedSegmentTypeVersions(forcedSegmentType))
|
|
||||||
}
|
|
||||||
return nil, fmt.Errorf("unsupported segment type: %s, supported: %v",
|
|
||||||
forcedSegmentType, SupportedSegmentTypes())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Scorch) loadSegmentPlugin(forcedSegmentType string,
|
|
||||||
forcedSegmentVersion uint32) error {
|
|
||||||
segPlugin, err := chooseSegmentPlugin(forcedSegmentType,
|
|
||||||
forcedSegmentVersion)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
s.segPlugin = segPlugin
|
|
||||||
return nil
|
|
||||||
}
|
|
|
@@ -11,8 +11,6 @@

 modern text indexing in go - [blevesearch.com](http://www.blevesearch.com/)

-Try out bleve live by [searching the bleve website](http://www.blevesearch.com/search/?q=bleve).
-
 ## Features

 * Index any go data structure (including JSON)
@@ -17,8 +17,8 @@ package custom
 import (
 	"fmt"

-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/registry"
 )

 const Name = "custom"
@@ -15,9 +15,9 @@
 package keyword

 import (
-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/analysis/tokenizer/single"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/single"
+	"github.com/blevesearch/bleve/v2/registry"
 )

 const Name = "keyword"
@@ -15,11 +15,11 @@
 package standard

 import (
-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/analysis/lang/en"
-	"github.com/blevesearch/bleve/analysis/token/lowercase"
-	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/analysis/lang/en"
+	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+	"github.com/blevesearch/bleve/v2/registry"
 )

 const Name = "standard"
@@ -18,8 +18,8 @@ import (
 	"fmt"
 	"time"

-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/registry"
 )

 const Name = "flexiblego"
@@ -17,9 +17,9 @@ package optional
 import (
 	"time"

-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/analysis/datetime/flexible"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/analysis/datetime/flexible"
+	"github.com/blevesearch/bleve/v2/registry"
 )

 const Name = "dateTimeOptional"
vendor/github.com/blevesearch/bleve/v2/analysis/freq.go (generated, vendored; new file, 70 lines)

@@ -0,0 +1,70 @@
|
||||||
|
// Copyright (c) 2014 Couchbase, Inc.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package analysis
|
||||||
|
|
||||||
|
import (
|
||||||
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TokenFrequency(tokens TokenStream, arrayPositions []uint64, options index.FieldIndexingOptions) index.TokenFrequencies {
|
||||||
|
rv := make(map[string]*index.TokenFreq, len(tokens))
|
||||||
|
|
||||||
|
if options.IncludeTermVectors() {
|
||||||
|
tls := make([]index.TokenLocation, len(tokens))
|
||||||
|
tlNext := 0
|
||||||
|
|
||||||
|
for _, token := range tokens {
|
||||||
|
tls[tlNext] = index.TokenLocation{
|
||||||
|
ArrayPositions: arrayPositions,
|
||||||
|
Start: token.Start,
|
||||||
|
End: token.End,
|
||||||
|
Position: token.Position,
|
||||||
|
}
|
||||||
|
|
||||||
|
curr, ok := rv[string(token.Term)]
|
||||||
|
if ok {
|
||||||
|
curr.Locations = append(curr.Locations, &tls[tlNext])
|
||||||
|
} else {
|
||||||
|
curr = &index.TokenFreq{
|
||||||
|
Term: token.Term,
|
||||||
|
Locations: []*index.TokenLocation{&tls[tlNext]},
|
||||||
|
}
|
||||||
|
rv[string(token.Term)] = curr
|
||||||
|
}
|
||||||
|
|
||||||
|
if !options.SkipFreqNorm() {
|
||||||
|
curr.SetFrequency(curr.Frequency() + 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
tlNext++
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for _, token := range tokens {
|
||||||
|
curr, exists := rv[string(token.Term)]
|
||||||
|
if !exists {
|
||||||
|
curr = &index.TokenFreq{
|
||||||
|
Term: token.Term,
|
||||||
|
}
|
||||||
|
rv[string(token.Term)] = curr
|
||||||
|
}
|
||||||
|
|
||||||
|
if !options.SkipFreqNorm() {
|
||||||
|
curr.SetFrequency(curr.Frequency() + 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return rv
|
||||||
|
}
|
|
@@ -22,12 +22,12 @@
 package en

 import (
-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/registry"

-	"github.com/blevesearch/bleve/analysis/token/lowercase"
-	"github.com/blevesearch/bleve/analysis/token/porter"
-	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
+	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+	"github.com/blevesearch/bleve/v2/analysis/token/porter"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
 )

 const AnalyzerName = "en"
@@ -17,8 +17,8 @@ package en
 import (
     "unicode/utf8"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 // PossessiveName is the name PossessiveFilter is registered as
@@ -15,8 +15,8 @@
 package en
 
 import (
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 
     "github.com/blevesearch/snowballstem"
     "github.com/blevesearch/snowballstem/english"
@@ -15,9 +15,9 @@
 package en
 
 import (
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/analysis/token/stop"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/analysis/token/stop"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
@@ -1,8 +1,8 @@
 package en
 
 import (
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 const StopName = "stop_en"
@@ -21,8 +21,8 @@ import (
     "unicode"
     "unicode/utf8"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 // Name is the name used to register LowerCaseFilter in the bleve registry
@@ -17,8 +17,8 @@ package porter
 import (
     "bytes"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 
     "github.com/blevesearch/go-porterstemmer"
 )
@@ -24,8 +24,8 @@ package stop
 import (
     "fmt"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 const Name = "stop_tokens"
@@ -17,8 +17,8 @@ package unicodenorm
 import (
     "fmt"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
     "golang.org/x/text/unicode/norm"
 )
 
@@ -15,8 +15,8 @@
 package single
 
 import (
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 const Name = "single"
@@ -17,8 +17,8 @@ package unicode
 import (
     "github.com/blevesearch/segment"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/registry"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/registry"
 )
 
 const Name = "unicode"
@@ -18,10 +18,10 @@ import (
     "encoding/json"
     "fmt"
 
-    "github.com/blevesearch/bleve/document"
-    "github.com/blevesearch/bleve/index"
-    "github.com/blevesearch/bleve/index/scorch"
-    "github.com/blevesearch/bleve/mapping"
+    "github.com/blevesearch/bleve/v2/document"
+    "github.com/blevesearch/bleve/v2/index/scorch"
+    "github.com/blevesearch/bleve/v2/mapping"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 type builderImpl struct {
@@ -20,14 +20,11 @@ import (
     "log"
     "time"
 
-    "github.com/blevesearch/bleve/index"
-    "github.com/blevesearch/bleve/index/store/gtreap"
-    "github.com/blevesearch/bleve/index/upsidedown"
-    "github.com/blevesearch/bleve/registry"
-    "github.com/blevesearch/bleve/search/highlight/highlighter/html"
+    "github.com/blevesearch/bleve/v2/index/scorch"
+    "github.com/blevesearch/bleve/v2/index/upsidedown/store/gtreap"
+    "github.com/blevesearch/bleve/v2/registry"
+    "github.com/blevesearch/bleve/v2/search/highlight/highlighter/html"
+    index "github.com/blevesearch/bleve_index_api"
 
-    // force import of scorch so its accessible by default
-    _ "github.com/blevesearch/bleve/index/scorch"
 )
 
 var bleveExpVar = expvar.NewMap("bleve")
@@ -79,7 +76,7 @@ func init() {
     Config.DefaultMemKVStore = gtreap.Name
 
     // default index
-    Config.DefaultIndexType = upsidedown.Name
+    Config.DefaultIndexType = scorch.Name
 
     bootDuration := time.Since(bootStart)
     bleveExpVar.Add("bootDuration", int64(bootDuration))
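Since the default index type changes from upsidedown.Name to scorch.Name here, a hedged sketch of the caller-side effect (index paths below are illustrative, not from the commit): bleve.New now yields a scorch index, and the legacy engine has to be requested explicitly through NewUsing.

package main

import (
    "log"

    "github.com/blevesearch/bleve/v2"
    "github.com/blevesearch/bleve/v2/index/upsidedown"
    "github.com/blevesearch/bleve/v2/index/upsidedown/store/boltdb"
)

func main() {
    m := bleve.NewIndexMapping()

    // With v2, the default engine behind New() is scorch.
    idx, err := bleve.New("example-scorch.bleve", m)
    if err != nil {
        log.Fatal(err)
    }
    defer idx.Close()

    // Keeping the old default requires asking for it explicitly.
    legacy, err := bleve.NewUsing("example-upsidedown.bleve", m, upsidedown.Name, boltdb.Name, nil)
    if err != nil {
        log.Fatal(err)
    }
    defer legacy.Close()
}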
@@ -16,7 +16,7 @@
 
 package bleve
 
-import "github.com/blevesearch/bleve/index/store/boltdb"
+import "github.com/blevesearch/bleve/v2/index/upsidedown/store/boltdb"
 
 // in normal environments we configure boltdb as the default storage
 func initDisk() {
@@ -18,7 +18,8 @@ import (
     "fmt"
     "reflect"
 
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeDocument int
@@ -29,14 +30,14 @@ func init() {
 }
 
 type Document struct {
-    ID              string  `json:"id"`
+    id              string  `json:"id"`
     Fields          []Field `json:"fields"`
     CompositeFields []*CompositeField
 }
 
 func NewDocument(id string) *Document {
     return &Document{
-        ID:              id,
+        id:              id,
         Fields:          make([]Field, 0),
         CompositeFields: make([]*CompositeField, 0),
     }
@@ -44,7 +45,7 @@ func NewDocument(id string) *Document {
 
 func (d *Document) Size() int {
     sizeInBytes := reflectStaticSizeDocument + size.SizeOfPtr +
-        len(d.ID)
+        len(d.id)
 
     for _, entry := range d.Fields {
         sizeInBytes += entry.Size()
@@ -82,7 +83,7 @@ func (d *Document) GoString() string {
         }
         compositeFields += fmt.Sprintf("%#v", field)
     }
-    return fmt.Sprintf("&document.Document{ID:%s, Fields: %s, CompositeFields: %s}", d.ID, fields, compositeFields)
+    return fmt.Sprintf("&document.Document{ID:%s, Fields: %s, CompositeFields: %s}", d.ID(), fields, compositeFields)
 }
 
 func (d *Document) NumPlainTextBytes() uint64 {
@@ -99,3 +100,31 @@ func (d *Document) NumPlainTextBytes() uint64 {
     }
     return rv
 }
+
+func (d *Document) ID() string {
+    return d.id
+}
+
+func (d *Document) SetID(id string) {
+    d.id = id
+}
+
+func (d *Document) AddIDField() {
+    d.AddField(NewTextFieldCustom("_id", nil, []byte(d.ID()), index.IndexField|index.StoreField, nil))
+}
+
+func (d *Document) VisitFields(visitor index.FieldVisitor) {
+    for _, f := range d.Fields {
+        visitor(f)
+    }
+}
+
+func (d *Document) VisitComposite(visitor index.CompositeFieldVisitor) {
+    for _, f := range d.CompositeFields {
+        visitor(f)
+    }
+}
+
+func (d *Document) HasComposite() bool {
+    return len(d.CompositeFields) > 0
+}
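A short migration sketch for the Document change above (illustrative, not part of the commit): the exported ID field becomes unexported, so callers move to the new ID()/SetID() accessors.

package main

import (
    "fmt"

    "github.com/blevesearch/bleve/v2/document"
)

func main() {
    doc := document.NewDocument("doc-1")

    // v1: fmt.Println(doc.ID)
    fmt.Println(doc.ID()) // v2: read through the accessor

    // v1: doc.ID = "doc-2"
    doc.SetID("doc-2") // v2: mutate through SetID
    fmt.Println(doc.ID())
}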
@@ -15,7 +15,7 @@
 package document
 
 import (
-    "github.com/blevesearch/bleve/analysis"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 type Field interface {
@@ -28,8 +28,8 @@ type Field interface {
     // arrays, ArrayPositions returns 2 indices used to resolve "doc2" value in
     // "doc1", then "field" in "doc2".
     ArrayPositions() []uint64
-    Options() IndexingOptions
-    Analyze() (int, analysis.TokenFrequencies)
+    Options() index.FieldIndexingOptions
+    Analyze()
     Value() []byte
 
     // NumPlainTextBytes should return the number of plain text bytes
@@ -38,4 +38,8 @@ type Field interface {
     NumPlainTextBytes() uint64
 
     Size() int
+
+    EncodedFieldType() byte
+    AnalyzedLength() int
+    AnalyzedTokenFrequencies() index.TokenFrequencies
 }
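A hedged sketch of the new Field.Analyze contract defined above (not part of the commit): Analyze() no longer returns (length, frequencies); the results are stored on the field and read back through AnalyzedLength() and AnalyzedTokenFrequencies().

package main

import (
    "fmt"

    "github.com/blevesearch/bleve/v2/document"
)

func main() {
    f := document.NewTextField("body", nil, []byte("hello world"))

    // v1: length, freqs := f.Analyze()
    f.Analyze() // v2: results are cached on the field
    length := f.AnalyzedLength()
    freqs := f.AnalyzedTokenFrequencies()

    fmt.Println(length, len(freqs))
}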
@@ -18,8 +18,9 @@ import (
     "fmt"
     "reflect"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeBooleanField int
@@ -29,14 +30,16 @@ func init() {
     reflectStaticSizeBooleanField = int(reflect.TypeOf(f).Size())
 }
 
-const DefaultBooleanIndexingOptions = StoreField | IndexField | DocValues
+const DefaultBooleanIndexingOptions = index.StoreField | index.IndexField | index.DocValues
 
 type BooleanField struct {
     name              string
     arrayPositions    []uint64
-    options           IndexingOptions
+    options           index.FieldIndexingOptions
     value             []byte
     numPlainTextBytes uint64
+    length            int
+    frequencies       index.TokenFrequencies
 }
 
 func (b *BooleanField) Size() int {
@@ -54,11 +57,11 @@ func (b *BooleanField) ArrayPositions() []uint64 {
     return b.arrayPositions
 }
 
-func (b *BooleanField) Options() IndexingOptions {
+func (b *BooleanField) Options() index.FieldIndexingOptions {
     return b.options
 }
 
-func (b *BooleanField) Analyze() (int, analysis.TokenFrequencies) {
+func (b *BooleanField) Analyze() {
     tokens := make(analysis.TokenStream, 0)
     tokens = append(tokens, &analysis.Token{
         Start: 0,
@@ -68,9 +71,8 @@ func (b *BooleanField) Analyze() (int, analysis.TokenFrequencies) {
         Type:     analysis.Boolean,
     })
 
-    fieldLength := len(tokens)
-    tokenFreqs := analysis.TokenFrequency(tokens, b.arrayPositions, b.options.IncludeTermVectors())
-    return fieldLength, tokenFreqs
+    b.length = len(tokens)
+    b.frequencies = analysis.TokenFrequency(tokens, b.arrayPositions, b.options)
 }
 
 func (b *BooleanField) Value() []byte {
@@ -92,6 +94,18 @@ func (b *BooleanField) NumPlainTextBytes() uint64 {
     return b.numPlainTextBytes
 }
 
+func (b *BooleanField) EncodedFieldType() byte {
+    return 'b'
+}
+
+func (b *BooleanField) AnalyzedLength() int {
+    return b.length
+}
+
+func (b *BooleanField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return b.frequencies
+}
+
 func NewBooleanFieldFromBytes(name string, arrayPositions []uint64, value []byte) *BooleanField {
     return &BooleanField{
         name:            name,
@@ -106,7 +120,7 @@ func NewBooleanField(name string, arrayPositions []uint64, b bool) *BooleanField
     return NewBooleanFieldWithIndexingOptions(name, arrayPositions, b, DefaultNumericIndexingOptions)
 }
 
-func NewBooleanFieldWithIndexingOptions(name string, arrayPositions []uint64, b bool, options IndexingOptions) *BooleanField {
+func NewBooleanFieldWithIndexingOptions(name string, arrayPositions []uint64, b bool, options index.FieldIndexingOptions) *BooleanField {
     numPlainTextBytes := 5
     v := []byte("F")
     if b {
@@ -17,8 +17,8 @@ package document
 import (
     "reflect"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeCompositeField int
@@ -28,30 +28,30 @@ func init() {
     reflectStaticSizeCompositeField = int(reflect.TypeOf(cf).Size())
 }
 
-const DefaultCompositeIndexingOptions = IndexField
+const DefaultCompositeIndexingOptions = index.IndexField
 
 type CompositeField struct {
     name                 string
     includedFields       map[string]bool
     excludedFields       map[string]bool
     defaultInclude       bool
-    options              IndexingOptions
+    options              index.FieldIndexingOptions
     totalLength          int
-    compositeFrequencies analysis.TokenFrequencies
+    compositeFrequencies index.TokenFrequencies
 }
 
 func NewCompositeField(name string, defaultInclude bool, include []string, exclude []string) *CompositeField {
     return NewCompositeFieldWithIndexingOptions(name, defaultInclude, include, exclude, DefaultCompositeIndexingOptions)
 }
 
-func NewCompositeFieldWithIndexingOptions(name string, defaultInclude bool, include []string, exclude []string, options IndexingOptions) *CompositeField {
+func NewCompositeFieldWithIndexingOptions(name string, defaultInclude bool, include []string, exclude []string, options index.FieldIndexingOptions) *CompositeField {
     rv := &CompositeField{
         name:           name,
         options:        options,
         defaultInclude: defaultInclude,
         includedFields: make(map[string]bool, len(include)),
         excludedFields: make(map[string]bool, len(exclude)),
-        compositeFrequencies: make(analysis.TokenFrequencies),
+        compositeFrequencies: make(index.TokenFrequencies),
     }
 
     for _, i := range include {
@@ -87,12 +87,11 @@ func (c *CompositeField) ArrayPositions() []uint64 {
     return []uint64{}
 }
 
-func (c *CompositeField) Options() IndexingOptions {
+func (c *CompositeField) Options() index.FieldIndexingOptions {
     return c.options
 }
 
-func (c *CompositeField) Analyze() (int, analysis.TokenFrequencies) {
-    return c.totalLength, c.compositeFrequencies
+func (c *CompositeField) Analyze() {
 }
 
 func (c *CompositeField) Value() []byte {
@@ -116,9 +115,21 @@ func (c *CompositeField) includesField(field string) bool {
     return shouldInclude
 }
 
-func (c *CompositeField) Compose(field string, length int, freq analysis.TokenFrequencies) {
+func (c *CompositeField) Compose(field string, length int, freq index.TokenFrequencies) {
     if c.includesField(field) {
         c.totalLength += length
         c.compositeFrequencies.MergeAll(field, freq)
     }
 }
+
+func (c *CompositeField) EncodedFieldType() byte {
+    return 'c'
+}
+
+func (c *CompositeField) AnalyzedLength() int {
+    return c.totalLength
+}
+
+func (c *CompositeField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return c.compositeFrequencies
+}
@@ -20,9 +20,10 @@ import (
     "reflect"
     "time"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/numeric"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/numeric"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeDateTimeField int
@@ -32,7 +33,7 @@ func init() {
     reflectStaticSizeDateTimeField = int(reflect.TypeOf(f).Size())
 }
 
-const DefaultDateTimeIndexingOptions = StoreField | IndexField | DocValues
+const DefaultDateTimeIndexingOptions = index.StoreField | index.IndexField | index.DocValues
 const DefaultDateTimePrecisionStep uint = 4
 
 var MinTimeRepresentable = time.Unix(0, math.MinInt64)
@@ -41,9 +42,11 @@ var MaxTimeRepresentable = time.Unix(0, math.MaxInt64)
 type DateTimeField struct {
     name              string
     arrayPositions    []uint64
-    options           IndexingOptions
+    options           index.FieldIndexingOptions
     value             numeric.PrefixCoded
     numPlainTextBytes uint64
+    length            int
+    frequencies       index.TokenFrequencies
 }
 
 func (n *DateTimeField) Size() int {
@@ -60,11 +63,23 @@ func (n *DateTimeField) ArrayPositions() []uint64 {
     return n.arrayPositions
 }
 
-func (n *DateTimeField) Options() IndexingOptions {
+func (n *DateTimeField) Options() index.FieldIndexingOptions {
     return n.options
 }
 
-func (n *DateTimeField) Analyze() (int, analysis.TokenFrequencies) {
+func (n *DateTimeField) EncodedFieldType() byte {
+    return 'd'
+}
+
+func (n *DateTimeField) AnalyzedLength() int {
+    return n.length
+}
+
+func (n *DateTimeField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return n.frequencies
+}
+
+func (n *DateTimeField) Analyze() {
     tokens := make(analysis.TokenStream, 0)
     tokens = append(tokens, &analysis.Token{
         Start: 0,
@@ -95,9 +110,8 @@ func (n *DateTimeField) Analyze() (int, analysis.TokenFrequencies) {
         }
     }
 
-    fieldLength := len(tokens)
-    tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors())
-    return fieldLength, tokenFreqs
+    n.length = len(tokens)
+    n.frequencies = analysis.TokenFrequency(tokens, n.arrayPositions, n.options)
 }
 
 func (n *DateTimeField) Value() []byte {
@@ -134,7 +148,7 @@ func NewDateTimeField(name string, arrayPositions []uint64, dt time.Time) (*Date
     return NewDateTimeFieldWithIndexingOptions(name, arrayPositions, dt, DefaultDateTimeIndexingOptions)
 }
 
-func NewDateTimeFieldWithIndexingOptions(name string, arrayPositions []uint64, dt time.Time, options IndexingOptions) (*DateTimeField, error) {
+func NewDateTimeFieldWithIndexingOptions(name string, arrayPositions []uint64, dt time.Time, options index.FieldIndexingOptions) (*DateTimeField, error) {
     if canRepresent(dt) {
         dtInt64 := dt.UnixNano()
         prefixCoded := numeric.MustNewPrefixCodedInt64(dtInt64, 0)
@@ -18,10 +18,11 @@ import (
     "fmt"
     "reflect"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/geo"
-    "github.com/blevesearch/bleve/numeric"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/geo"
+    "github.com/blevesearch/bleve/v2/numeric"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeGeoPointField int
@@ -36,9 +37,11 @@ var GeoPrecisionStep uint = 9
 type GeoPointField struct {
     name              string
     arrayPositions    []uint64
-    options           IndexingOptions
+    options           index.FieldIndexingOptions
     value             numeric.PrefixCoded
     numPlainTextBytes uint64
+    length            int
+    frequencies       index.TokenFrequencies
 }
 
 func (n *GeoPointField) Size() int {
@@ -55,11 +58,23 @@ func (n *GeoPointField) ArrayPositions() []uint64 {
     return n.arrayPositions
 }
 
-func (n *GeoPointField) Options() IndexingOptions {
+func (n *GeoPointField) Options() index.FieldIndexingOptions {
     return n.options
 }
 
-func (n *GeoPointField) Analyze() (int, analysis.TokenFrequencies) {
+func (n *GeoPointField) EncodedFieldType() byte {
+    return 'g'
+}
+
+func (n *GeoPointField) AnalyzedLength() int {
+    return n.length
+}
+
+func (n *GeoPointField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return n.frequencies
+}
+
+func (n *GeoPointField) Analyze() {
     tokens := make(analysis.TokenStream, 0)
     tokens = append(tokens, &analysis.Token{
         Start: 0,
@@ -90,9 +105,8 @@ func (n *GeoPointField) Analyze() (int, analysis.TokenFrequencies) {
         }
     }
 
-    fieldLength := len(tokens)
-    tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors())
-    return fieldLength, tokenFreqs
+    n.length = len(tokens)
+    n.frequencies = analysis.TokenFrequency(tokens, n.arrayPositions, n.options)
 }
 
 func (n *GeoPointField) Value() []byte {
@@ -137,7 +151,7 @@ func NewGeoPointField(name string, arrayPositions []uint64, lon, lat float64) *G
     return NewGeoPointFieldWithIndexingOptions(name, arrayPositions, lon, lat, DefaultNumericIndexingOptions)
 }
 
-func NewGeoPointFieldWithIndexingOptions(name string, arrayPositions []uint64, lon, lat float64, options IndexingOptions) *GeoPointField {
+func NewGeoPointFieldWithIndexingOptions(name string, arrayPositions []uint64, lon, lat float64, options index.FieldIndexingOptions) *GeoPointField {
     mhash := geo.MortonHash(lon, lat)
     prefixCoded := numeric.MustNewPrefixCodedInt64(int64(mhash), 0)
     return &GeoPointField{
@@ -18,9 +18,10 @@ import (
     "fmt"
     "reflect"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/numeric"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/numeric"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeNumericField int
@@ -30,16 +31,18 @@ func init() {
     reflectStaticSizeNumericField = int(reflect.TypeOf(f).Size())
 }
 
-const DefaultNumericIndexingOptions = StoreField | IndexField | DocValues
+const DefaultNumericIndexingOptions = index.StoreField | index.IndexField | index.DocValues
 
 const DefaultPrecisionStep uint = 4
 
 type NumericField struct {
     name              string
     arrayPositions    []uint64
-    options           IndexingOptions
+    options           index.FieldIndexingOptions
     value             numeric.PrefixCoded
     numPlainTextBytes uint64
+    length            int
+    frequencies       index.TokenFrequencies
 }
 
 func (n *NumericField) Size() int {
@@ -56,11 +59,23 @@ func (n *NumericField) ArrayPositions() []uint64 {
     return n.arrayPositions
 }
 
-func (n *NumericField) Options() IndexingOptions {
+func (n *NumericField) Options() index.FieldIndexingOptions {
     return n.options
 }
 
-func (n *NumericField) Analyze() (int, analysis.TokenFrequencies) {
+func (n *NumericField) EncodedFieldType() byte {
+    return 'n'
+}
+
+func (n *NumericField) AnalyzedLength() int {
+    return n.length
+}
+
+func (n *NumericField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return n.frequencies
+}
+
+func (n *NumericField) Analyze() {
     tokens := make(analysis.TokenStream, 0)
     tokens = append(tokens, &analysis.Token{
         Start: 0,
@@ -91,9 +106,8 @@ func (n *NumericField) Analyze() (int, analysis.TokenFrequencies) {
         }
     }
 
-    fieldLength := len(tokens)
-    tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors())
-    return fieldLength, tokenFreqs
+    n.length = len(tokens)
+    n.frequencies = analysis.TokenFrequency(tokens, n.arrayPositions, n.options)
 }
 
 func (n *NumericField) Value() []byte {
@@ -130,7 +144,7 @@ func NewNumericField(name string, arrayPositions []uint64, number float64) *Nume
     return NewNumericFieldWithIndexingOptions(name, arrayPositions, number, DefaultNumericIndexingOptions)
 }
 
-func NewNumericFieldWithIndexingOptions(name string, arrayPositions []uint64, number float64, options IndexingOptions) *NumericField {
+func NewNumericFieldWithIndexingOptions(name string, arrayPositions []uint64, number float64, options index.FieldIndexingOptions) *NumericField {
     numberInt64 := numeric.Float64ToInt64(number)
     prefixCoded := numeric.MustNewPrefixCodedInt64(numberInt64, 0)
     return &NumericField{
@@ -18,8 +18,9 @@ import (
     "fmt"
     "reflect"
 
-    "github.com/blevesearch/bleve/analysis"
-    "github.com/blevesearch/bleve/size"
+    "github.com/blevesearch/bleve/v2/analysis"
+    "github.com/blevesearch/bleve/v2/size"
+    index "github.com/blevesearch/bleve_index_api"
 )
 
 var reflectStaticSizeTextField int
@@ -29,15 +30,17 @@ func init() {
     reflectStaticSizeTextField = int(reflect.TypeOf(f).Size())
 }
 
-const DefaultTextIndexingOptions = IndexField | DocValues
+const DefaultTextIndexingOptions = index.IndexField | index.DocValues
 
 type TextField struct {
     name              string
     arrayPositions    []uint64
-    options           IndexingOptions
+    options           index.FieldIndexingOptions
     analyzer          *analysis.Analyzer
     value             []byte
     numPlainTextBytes uint64
+    length            int
+    frequencies       index.TokenFrequencies
 }
 
 func (t *TextField) Size() int {
@@ -55,11 +58,23 @@ func (t *TextField) ArrayPositions() []uint64 {
     return t.arrayPositions
 }
 
-func (t *TextField) Options() IndexingOptions {
+func (t *TextField) Options() index.FieldIndexingOptions {
     return t.options
 }
 
-func (t *TextField) Analyze() (int, analysis.TokenFrequencies) {
+func (t *TextField) EncodedFieldType() byte {
+    return 't'
+}
+
+func (t *TextField) AnalyzedLength() int {
+    return t.length
+}
+
+func (t *TextField) AnalyzedTokenFrequencies() index.TokenFrequencies {
+    return t.frequencies
+}
+
+func (t *TextField) Analyze() {
     var tokens analysis.TokenStream
     if t.analyzer != nil {
         bytesToAnalyze := t.Value()
@@ -81,9 +96,8 @@ func (t *TextField) Analyze() (int, analysis.TokenFrequencies) {
             },
         }
     }
-    fieldLength := len(tokens) // number of tokens in this doc field
-    tokenFreqs := analysis.TokenFrequency(tokens, t.arrayPositions, t.options.IncludeTermVectors())
-    return fieldLength, tokenFreqs
+    t.length = len(tokens) // number of tokens in this doc field
+    t.frequencies = analysis.TokenFrequency(tokens, t.arrayPositions, t.options)
 }
 
 func (t *TextField) Analyzer() *analysis.Analyzer {
@@ -94,6 +108,10 @@ func (t *TextField) Value() []byte {
     return t.value
 }
 
+func (t *TextField) Text() string {
+    return string(t.value)
+}
+
 func (t *TextField) GoString() string {
     return fmt.Sprintf("&document.TextField{Name:%s, Options: %s, Analyzer: %v, Value: %s, ArrayPositions: %v}", t.name, t.options, t.analyzer, t.value, t.arrayPositions)
 }
@@ -106,7 +124,7 @@ func NewTextField(name string, arrayPositions []uint64, value []byte) *TextField
     return NewTextFieldWithIndexingOptions(name, arrayPositions, value, DefaultTextIndexingOptions)
 }
 
-func NewTextFieldWithIndexingOptions(name string, arrayPositions []uint64, value []byte, options IndexingOptions) *TextField {
+func NewTextFieldWithIndexingOptions(name string, arrayPositions []uint64, value []byte, options index.FieldIndexingOptions) *TextField {
     return &TextField{
         name:           name,
         arrayPositions: arrayPositions,
@@ -127,7 +145,7 @@ func NewTextFieldWithAnalyzer(name string, arrayPositions []uint64, value []byte
     }
 }
 
-func NewTextFieldCustom(name string, arrayPositions []uint64, value []byte, options IndexingOptions, analyzer *analysis.Analyzer) *TextField {
+func NewTextFieldCustom(name string, arrayPositions []uint64, value []byte, options index.FieldIndexingOptions, analyzer *analysis.Analyzer) *TextField {
     return &TextField{
         name:           name,
         arrayPositions: arrayPositions,
@@ -20,7 +20,6 @@ const (
     ErrorIndexPathDoesNotExist
     ErrorIndexMetaMissing
     ErrorIndexMetaCorrupt
-    ErrorUnknownStorageType
     ErrorIndexClosed
     ErrorAliasMulti
     ErrorAliasEmpty
@@ -42,7 +41,6 @@ var errorMessages = map[Error]string{
     ErrorIndexPathDoesNotExist: "cannot open index, path does not exist",
     ErrorIndexMetaMissing:      "cannot open index, metadata missing",
    ErrorIndexMetaCorrupt:      "cannot open index, metadata corrupt",
-    ErrorUnknownStorageType:    "unknown storage type",
     ErrorIndexClosed:           "index is closed",
     ErrorAliasMulti:            "cannot perform single index operation on multiple index alias",
     ErrorAliasEmpty:            "cannot perform operation on empty alias",
@@ -18,7 +18,7 @@ import (
     "fmt"
     "math"
 
-    "github.com/blevesearch/bleve/numeric"
+    "github.com/blevesearch/bleve/v2/numeric"
 )
 
 // GeoBits is the number of bits used for a single geo point
@@ -1,18 +1,20 @@
-module github.com/blevesearch/bleve
+module github.com/blevesearch/bleve/v2
 
 go 1.13
 
 require (
     github.com/RoaringBitmap/roaring v0.4.23
-    github.com/blevesearch/blevex v1.0.0
+    github.com/blevesearch/bleve_index_api v1.0.0
     github.com/blevesearch/go-porterstemmer v1.0.3
+    github.com/blevesearch/scorch_segment_api v1.0.0
     github.com/blevesearch/segment v0.9.0
     github.com/blevesearch/snowballstem v0.9.0
-    github.com/blevesearch/zap/v11 v11.0.14
-    github.com/blevesearch/zap/v12 v12.0.14
-    github.com/blevesearch/zap/v13 v13.0.6
-    github.com/blevesearch/zap/v14 v14.0.5
-    github.com/blevesearch/zap/v15 v15.0.3
+    github.com/blevesearch/upsidedown_store_api v1.0.1
+    github.com/blevesearch/zapx/v11 v11.1.10
+    github.com/blevesearch/zapx/v12 v12.1.10
+    github.com/blevesearch/zapx/v13 v13.1.10
+    github.com/blevesearch/zapx/v14 v14.1.10
+    github.com/blevesearch/zapx/v15 v15.1.10
     github.com/couchbase/moss v0.1.0
     github.com/couchbase/vellum v1.0.2
     github.com/golang/protobuf v1.3.2
vendor/github.com/blevesearch/bleve/v2/go.sum (generated, vendored, new file, 128 lines)
@@ -0,0 +1,128 @@
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
github.com/RoaringBitmap/roaring v0.4.23 h1:gpyfd12QohbqhFO4NVDUdoPOCXsyahYRQhINmlHxKeo=
|
||||||
|
github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo=
|
||||||
|
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||||
|
github.com/blevesearch/bleve_index_api v1.0.0 h1:Ds3XeuTxjXCkG6pgIwWDRyooJKNIuOKemnN0N0IkhTU=
|
||||||
|
github.com/blevesearch/bleve_index_api v1.0.0/go.mod h1:fiwKS0xLEm+gBRgv5mumf0dhgFr2mDgZah1pqv1c1M4=
|
||||||
|
github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
|
||||||
|
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
|
||||||
|
github.com/blevesearch/mmap-go v1.0.2 h1:JtMHb+FgQCTTYIhtMvimw15dJwu1Y5lrZDMOFXVWPk0=
|
||||||
|
github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA=
|
||||||
|
github.com/blevesearch/scorch_segment_api v1.0.0 h1:BUkCPWDg2gimTEyVDXf85I2buqqt4lh28uaVMiJsIYk=
|
||||||
|
github.com/blevesearch/scorch_segment_api v1.0.0/go.mod h1:KgRYmlfYC27NeM6cXOHx8LBgq7jn0atpV8mVWoBKBng=
|
||||||
|
github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac=
|
||||||
|
github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
|
||||||
|
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
|
||||||
|
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
|
||||||
|
github.com/blevesearch/upsidedown_store_api v1.0.1 h1:1SYRwyoFLwG3sj0ed89RLtM15amfX2pXlYbFOnF8zNU=
|
||||||
|
github.com/blevesearch/upsidedown_store_api v1.0.1/go.mod h1:MQDVGpHZrpe3Uy26zJBf/a8h0FZY6xJbthIMm8myH2Q=
|
||||||
|
github.com/blevesearch/zapx/v11 v11.1.10 h1:8Eo3rXiHsVSP9Sk+4StrrwLrj9vyulhMVPmxTf8ZuDg=
|
||||||
|
github.com/blevesearch/zapx/v11 v11.1.10/go.mod h1:DTjbcBqrr/Uo82UBilDC8lEew42gN/OcIyiTNFtSijc=
|
||||||
|
github.com/blevesearch/zapx/v12 v12.1.10 h1:sqR+/0Z4dSTovApRqLA1HnilMtQer7a4UvPrNmPzlTM=
|
||||||
|
github.com/blevesearch/zapx/v12 v12.1.10/go.mod h1:14NmKnPrnKAIyiEJM566k/Jk+FQpuiflT5d3uaaK3MI=
|
||||||
|
github.com/blevesearch/zapx/v13 v13.1.10 h1:zCneEVRJDXwtDfSwh+33Dxguliv192vCK283zdGH4Sw=
|
||||||
|
github.com/blevesearch/zapx/v13 v13.1.10/go.mod h1:YsVY6YGpTEAlJOMjdL7EsdBLvjWd8kPa2gwJDNpqLJo=
|
||||||
|
github.com/blevesearch/zapx/v14 v14.1.10 h1:nD0vw2jxKogJFfA5WyoS4wNwZlVby3Aq8aW7CZi6YIw=
|
||||||
|
github.com/blevesearch/zapx/v14 v14.1.10/go.mod h1:hsULl5eJSxs5NEfBsmeT9qrqdCP+/ecpVZKt60M4V64=
|
||||||
|
github.com/blevesearch/zapx/v15 v15.1.10 h1:kZR3b9jO9l6s2B5UHI+1N1llLzJ4nYikkXQTMrDl1vQ=
|
||||||
|
github.com/blevesearch/zapx/v15 v15.1.10/go.mod h1:4ypq25bwtSQKzwEF1UERyIhmGTbMT3brY/n4NC5gRnM=
|
||||||
|
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
|
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
||||||
|
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||||
|
github.com/couchbase/ghistogram v0.1.0 h1:b95QcQTCzjTUocDXp/uMgSNQi8oj1tGwnJ4bODWZnps=
|
||||||
|
github.com/couchbase/ghistogram v0.1.0/go.mod h1:s1Jhy76zqfEecpNWJfWUiKZookAFaiGOEoyzgHt9i7k=
|
||||||
|
github.com/couchbase/moss v0.1.0 h1:HCL+xxHUwmOaL44kMM/gU08OW6QGCui1WVFO58bjhNI=
|
||||||
|
github.com/couchbase/moss v0.1.0/go.mod h1:9MaHIaRuy9pvLPUJxB8sh8OrLfyDczECVL37grCIubs=
|
||||||
|
github.com/couchbase/vellum v1.0.2 h1:BrbP0NKiyDdndMPec8Jjhy0U47CZ0Lgx3xUC2r9rZqw=
|
||||||
|
github.com/couchbase/vellum v1.0.2/go.mod h1:FcwrEivFpNi24R3jLOs3n+fs5RnuQnQqCLBJ1uAg1W4=
|
||||||
|
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
|
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2 h1:Ujru1hufTHVb++eG6OuNDKMxZnGIvF6o/u8q/8h2+I4=
|
||||||
|
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||||
|
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 h1:gclg6gY70GLy3PbkQ1AERPfmLMMagS60DKF78eWwLn8=
|
||||||
|
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||||
|
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||||
|
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
|
||||||
|
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||||
|
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
|
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
|
||||||
|
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKpxb/jFExr4HGq6on2dEOmnL6FV+fgPw=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||||
|
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||||
|
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
|
||||||
|
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||||
|
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||||
|
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||||
|
github.com/kljensen/snowball v0.6.0 h1:6DZLCcZeL0cLfodx+Md4/OLC6b/bfurWUOUGs1ydfOU=
|
||||||
|
github.com/kljensen/snowball v0.6.0/go.mod h1:27N7E8fVU5H68RlUmnWwZCfxgt4POBJfENGMvNRhldw=
|
||||||
|
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||||
|
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||||
|
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||||
|
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
|
||||||
|
github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM=
|
||||||
|
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
|
||||||
|
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
|
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
|
||||||
|
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
|
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
|
||||||
|
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||||
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
|
github.com/philhofer/fwd v1.0.0 h1:UbZqGr5Y38ApvM/V/jEljVxwocdweyH+vmYvRPBnbqQ=
|
||||||
|
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563 h1:dY6ETXrvDG7Sa4vE8ZQG4yqWg6UnOcbqTAahkV813vQ=
|
||||||
|
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||||
|
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||||
|
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
|
||||||
|
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||||
|
github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
|
||||||
|
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
|
||||||
|
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||||
|
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
|
||||||
|
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||||
|
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
||||||
|
github.com/steveyen/gtreap v0.1.0 h1:CjhzTa274PyJLJuMZwIzCO1PfC00oRa8d1Kc78bFXJM=
|
||||||
|
github.com/steveyen/gtreap v0.1.0/go.mod h1:kl/5J7XbrOmlIbYIXdRHDDE5QxHqpk0cmkT7Z4dM9/Y=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
|
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||||
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
|
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
||||||
|
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
||||||
|
github.com/tinylib/msgp v1.1.0 h1:9fQd+ICuRIu/ue4vxJZu6/LzxN0HwMds2nq/0cFvxHU=
|
||||||
|
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||||
|
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||||
|
github.com/willf/bitset v1.1.10 h1:NotGKqX0KwQ72NUzqrjZq5ipPNDQex9lo3WpaS8L2sc=
|
||||||
|
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
|
||||||
|
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||||
|
go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=
|
||||||
|
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
|
||||||
|
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
|
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd h1:nTDtHvHSdCn1m6ITfMRqtOd/9+7a3s8RBNOZ3eYZzJA=
|
||||||
|
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
|
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5 h1:LfCXLvNmTYH9kEmVgqbnsWfruoXZIrh4YBgqVHtDvw0=
|
||||||
|
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||||
|
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||||
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
@ -16,12 +16,12 @@ package bleve
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"github.com/blevesearch/bleve/v2/index/upsidedown"
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/document"
|
"github.com/blevesearch/bleve/v2/document"
|
||||||
"github.com/blevesearch/bleve/index"
|
"github.com/blevesearch/bleve/v2/mapping"
|
||||||
"github.com/blevesearch/bleve/index/store"
|
"github.com/blevesearch/bleve/v2/size"
|
||||||
"github.com/blevesearch/bleve/mapping"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/size"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// A Batch groups together multiple Index and Delete
|
// A Batch groups together multiple Index and Delete
|
||||||
|
@ -71,7 +71,7 @@ func (b *Batch) TotalDocsSize() uint64 {
|
||||||
// batch which skips the mapping. NOTE: the bleve Index is not updated
|
// batch which skips the mapping. NOTE: the bleve Index is not updated
|
||||||
// until the batch is executed.
|
// until the batch is executed.
|
||||||
func (b *Batch) IndexAdvanced(doc *document.Document) (err error) {
|
func (b *Batch) IndexAdvanced(doc *document.Document) (err error) {
|
||||||
if doc.ID == "" {
|
if doc.ID() == "" {
|
||||||
return ErrorEmptyID
|
return ErrorEmptyID
|
||||||
}
|
}
|
||||||
b.internal.Update(doc)
|
b.internal.Update(doc)
|
||||||
|
@@ -216,7 +216,7 @@ type Index interface {

     // Document returns specified document or nil if the document is not
     // indexed or stored.
-    Document(id string) (*document.Document, error)
+    Document(id string) (index.Document, error)
     // DocCount returns the number of documents in the index.
     DocCount() (uint64, error)

@@ -245,9 +245,8 @@ type Index interface {
     // SetName lets you assign your own logical name to this index
     SetName(string)

-    // Advanced returns the indexer and data store, exposing lower level
-    // methods to enumerate records and access data.
-    Advanced() (index.Index, store.KVStore, error)
+    // Advanced returns the internal index implementation
+    Advanced() (index.Index, error)
 }

 // New index at the specified path, must not exist.

@@ -263,7 +262,7 @@ func New(path string, mapping mapping.IndexMapping) (Index, error) {
 // The provided mapping will be used for all
 // Index/Search operations.
 func NewMemOnly(mapping mapping.IndexMapping) (Index, error) {
-    return newIndexUsing("", mapping, Config.DefaultIndexType, Config.DefaultMemKVStore, nil)
+    return newIndexUsing("", mapping, upsidedown.Name, Config.DefaultMemKVStore, nil)
 }

 // NewUsing creates index at the specified path,
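
For code that embeds bleve, the interface changes above are the visible part of this upgrade: Document() now returns the index.Document interface from github.com/blevesearch/bleve_index_api instead of *document.Document, and Advanced() no longer hands back a KV store. The following is a minimal sketch (not taken from this commit) of how an embedding application might read back a stored document under the v2 API, assuming the default index mapping; only identifiers that appear in this diff or in bleve's public API are used.

    package main

    import (
    	"fmt"
    	"log"

    	bleve "github.com/blevesearch/bleve/v2"
    	index "github.com/blevesearch/bleve_index_api"
    )

    func main() {
    	// An in-memory index with the default mapping (NewMemOnly is shown in
    	// the hunk above; NewIndexMapping is bleve's standard mapping helper).
    	idx, err := bleve.NewMemOnly(bleve.NewIndexMapping())
    	if err != nil {
    		log.Fatal(err)
    	}
    	if err := idx.Index("doc-1", map[string]string{"body": "hello"}); err != nil {
    		log.Fatal(err)
    	}

    	// v1 exposed *document.Document with exported fields; v2 returns the
    	// index.Document interface, so stored fields are walked via VisitFields.
    	doc, err := idx.Document("doc-1")
    	if err != nil || doc == nil {
    		log.Fatal("document not found: ", err)
    	}
    	doc.VisitFields(func(f index.Field) {
    		fmt.Println("stored field:", f.Name())
    	})
    }
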
@@ -21,9 +21,8 @@ import (
     "sync"

     "github.com/RoaringBitmap/roaring"
-    "github.com/blevesearch/bleve/document"
-    "github.com/blevesearch/bleve/index"
-    "github.com/blevesearch/bleve/index/scorch/segment"
+    index "github.com/blevesearch/bleve_index_api"
+    segment "github.com/blevesearch/scorch_segment_api"
     bolt "go.etcd.io/bbolt"
 )

@@ -40,7 +39,7 @@ type Builder struct {
     mergeMax  int
     batch     *index.Batch
     internal  map[string][]byte
-    segPlugin segment.Plugin
+    segPlugin SegmentPlugin
 }

 func NewBuilder(config map[string]interface{}) (*Builder, error) {

@@ -117,7 +116,7 @@ func (o *Builder) parseConfig(config map[string]interface{}) (err error) {

 // Index will place the document into the index.
 // It is invalid to index the same document multiple times.
-func (o *Builder) Index(doc *document.Document) error {
+func (o *Builder) Index(doc index.Document) error {
     o.m.Lock()
     defer o.m.Unlock()

@@ -135,14 +134,14 @@ func (o *Builder) maybeFlushBatchLOCKED(moreThan int) error {
 }

 func (o *Builder) executeBatchLOCKED(batch *index.Batch) (err error) {
-    analysisResults := make([]*index.AnalysisResult, 0, len(batch.IndexOps))
+    analysisResults := make([]index.Document, 0, len(batch.IndexOps))
     for _, doc := range batch.IndexOps {
         if doc != nil {
             // insert _id field
-            doc.AddField(document.NewTextFieldCustom("_id", nil, []byte(doc.ID), document.IndexField|document.StoreField, nil))
+            doc.AddIDField()
             // perform analysis directly
-            analysisResult := analyze(doc)
-            analysisResults = append(analysisResults, analysisResult)
+            analyze(doc)
+            analysisResults = append(analysisResults, doc)
         }
     }
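
The builder changes above track the same API shift: documents now carry their own analysis results, so executeBatchLOCKED appends the analyzed index.Document itself instead of a separate AnalysisResult. A rough usage sketch of the offline builder under v2 follows; the "path" config key and the Close call are assumptions about the scorch builder's configuration surface and are not shown in this hunk.

    package main

    import (
    	"log"

    	"github.com/blevesearch/bleve/v2/document"
    	"github.com/blevesearch/bleve/v2/index/scorch"
    )

    func main() {
    	// NewBuilder and Index(doc index.Document) appear in the hunk above;
    	// the config keys below are illustrative assumptions.
    	b, err := scorch.NewBuilder(map[string]interface{}{"path": "/tmp/offline-index"})
    	if err != nil {
    		log.Fatal(err)
    	}

    	// *document.Document satisfies index.Document in v2, as the snapshot
    	// code later in this diff relies on (document.NewDocument / AddField).
    	doc := document.NewDocument("doc-1")
    	doc.AddField(document.NewTextField("body", nil, []byte("hello world")))
    	if err := b.Index(doc); err != nil {
    		log.Fatal(err)
    	}

    	// Close is assumed to flush outstanding batches and finalize segments.
    	if err := b.Close(); err != nil {
    		log.Fatal(err)
    	}
    }
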

vendor/github.com/blevesearch/bleve/v2/index/scorch/empty.go (generated, vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
+// Copyright (c) 2020 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scorch
+
+import segment "github.com/blevesearch/scorch_segment_api"
+
+type emptyPostingsIterator struct{}
+
+func (e *emptyPostingsIterator) Next() (segment.Posting, error) {
+    return nil, nil
+}
+
+func (e *emptyPostingsIterator) Advance(uint64) (segment.Posting, error) {
+    return nil, nil
+}
+
+func (e *emptyPostingsIterator) Size() int {
+    return 0
+}
+
+var anEmptyPostingsIterator = &emptyPostingsIterator{}
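
empty.go gives scorch a package-private stand-in for the exported segment.EmptyPostingsIterator it previously took from the old segment package; the optimize.go hunks further down swap segment.AnEmptyPostingsIterator for this anEmptyPostingsIterator. Callers detect exhaustion by a nil Posting from Next(), so an iterator that always returns nil means "no results". A hypothetical helper (not part of the diff) showing that contract:

    package scorch

    import segment "github.com/blevesearch/scorch_segment_api"

    // drainPostings counts postings until the iterator signals exhaustion by
    // returning a nil Posting; anEmptyPostingsIterator returns zero immediately.
    func drainPostings(it segment.PostingsIterator) (int, error) {
    	n := 0
    	for {
    		p, err := it.Next()
    		if err != nil {
    			return n, err
    		}
    		if p == nil {
    			return n, nil
    		}
    		n++
    	}
    }
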

vendor/github.com/blevesearch/bleve/v2/index/scorch/int.go (generated, vendored, new file, 92 lines)
@@ -0,0 +1,92 @@
+// Copyright 2014 The Cockroach Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+// implied. See the License for the specific language governing
+// permissions and limitations under the License.
+
+// This code originated from:
+// https://github.com/cockroachdb/cockroach/blob/2dd65dde5d90c157f4b93f92502ca1063b904e1d/pkg/util/encoding/encoding.go
+
+// Modified to not use pkg/errors
+
+package scorch
+
+import "fmt"
+
+const (
+    // intMin is chosen such that the range of int tags does not overlap the
+    // ascii character set that is frequently used in testing.
+    intMin      = 0x80 // 128
+    intMaxWidth = 8
+    intZero     = intMin + intMaxWidth           // 136
+    intSmall    = intMax - intZero - intMaxWidth // 109
+    // intMax is the maximum int tag value.
+    intMax = 0xfd // 253
+)
+
+// encodeUvarintAscending encodes the uint64 value using a variable length
+// (length-prefixed) representation. The length is encoded as a single
+// byte indicating the number of encoded bytes (-8) to follow. See
+// EncodeVarintAscending for rationale. The encoded bytes are appended to the
+// supplied buffer and the final buffer is returned.
+func encodeUvarintAscending(b []byte, v uint64) []byte {
+    switch {
+    case v <= intSmall:
+        return append(b, intZero+byte(v))
+    case v <= 0xff:
+        return append(b, intMax-7, byte(v))
+    case v <= 0xffff:
+        return append(b, intMax-6, byte(v>>8), byte(v))
+    case v <= 0xffffff:
+        return append(b, intMax-5, byte(v>>16), byte(v>>8), byte(v))
+    case v <= 0xffffffff:
+        return append(b, intMax-4, byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
+    case v <= 0xffffffffff:
+        return append(b, intMax-3, byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8),
+            byte(v))
+    case v <= 0xffffffffffff:
+        return append(b, intMax-2, byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16),
+            byte(v>>8), byte(v))
+    case v <= 0xffffffffffffff:
+        return append(b, intMax-1, byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24),
+            byte(v>>16), byte(v>>8), byte(v))
+    default:
+        return append(b, intMax, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32),
+            byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
+    }
+}
+
+// decodeUvarintAscending decodes a varint encoded uint64 from the input
+// buffer. The remainder of the input buffer and the decoded uint64
+// are returned.
+func decodeUvarintAscending(b []byte) ([]byte, uint64, error) {
+    if len(b) == 0 {
+        return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value")
+    }
+    length := int(b[0]) - intZero
+    b = b[1:] // skip length byte
+    if length <= intSmall {
+        return b, uint64(length), nil
+    }
+    length -= intSmall
+    if length < 0 || length > 8 {
+        return nil, 0, fmt.Errorf("invalid uvarint length of %d", length)
+    } else if len(b) < length {
+        return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value: %q", b)
+    }
+    var v uint64
+    // It is faster to range over the elements in a slice than to index
+    // into the slice on each loop iteration.
+    for _, t := range b[:length] {
+        v = (v << 8) | uint64(t)
+    }
+    return b[length:], v, nil
+}
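
The point of this copied encoder is that encoded values sort byte-wise in the same order as the underlying integers, which lets the persister further down use snapshot epochs directly as bolt bucket keys (see the prepareBoltSnapshot hunk, where segment.EncodeUvarintAscending becomes the local encodeUvarintAscending). A small test-style sketch of that property, assuming it lives in the same scorch package as the helpers above; the test itself is not part of the upstream change.

    package scorch

    import (
    	"bytes"
    	"testing"
    )

    // TestUvarintAscendingOrder checks the two properties the persister relies on:
    // encode/decode round-trips are lossless, and byte-wise comparison of encoded
    // keys matches numeric comparison of the original values.
    func TestUvarintAscendingOrder(t *testing.T) {
    	vals := []uint64{0, 1, intSmall, intSmall + 1, 1 << 20, 1 << 40, 1<<64 - 1}
    	var prev []byte
    	for _, v := range vals {
    		enc := encodeUvarintAscending(nil, v)
    		rest, dec, err := decodeUvarintAscending(enc)
    		if err != nil || dec != v || len(rest) != 0 {
    			t.Fatalf("round-trip failed for %d: got %d, err=%v", v, dec, err)
    		}
    		if prev != nil && bytes.Compare(prev, enc) >= 0 {
    			t.Fatalf("encoding of %d does not sort after the previous value", v)
    		}
    		prev = enc
    	}
    }
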
@ -19,8 +19,8 @@ import (
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
type segmentIntroduction struct {
|
type segmentIntroduction struct {
|
|
@ -24,8 +24,8 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index/scorch/mergeplan"
|
"github.com/blevesearch/bleve/v2/index/scorch/mergeplan"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (s *Scorch) mergerLoop() {
|
func (s *Scorch) mergerLoop() {
|
|
@ -17,8 +17,8 @@ package scorch
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -174,9 +174,9 @@ OUTER:
|
||||||
var docNum1HitLastOk bool
|
var docNum1HitLastOk bool
|
||||||
|
|
||||||
for _, tfr := range o.tfrs {
|
for _, tfr := range o.tfrs {
|
||||||
if _, ok := tfr.iterators[i].(*segment.EmptyPostingsIterator); ok {
|
if _, ok := tfr.iterators[i].(*emptyPostingsIterator); ok {
|
||||||
// An empty postings iterator means the entire AND is empty.
|
// An empty postings iterator means the entire AND is empty.
|
||||||
oTFR.iterators[i] = segment.AnEmptyPostingsIterator
|
oTFR.iterators[i] = anEmptyPostingsIterator
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -193,7 +193,7 @@ OUTER:
|
||||||
if docNum1HitLastOk && docNum1HitLast != docNum1Hit {
|
if docNum1HitLastOk && docNum1HitLast != docNum1Hit {
|
||||||
// The docNum1Hit doesn't match the previous
|
// The docNum1Hit doesn't match the previous
|
||||||
// docNum1HitLast, so the entire AND is empty.
|
// docNum1HitLast, so the entire AND is empty.
|
||||||
oTFR.iterators[i] = segment.AnEmptyPostingsIterator
|
oTFR.iterators[i] = anEmptyPostingsIterator
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -205,7 +205,7 @@ OUTER:
|
||||||
|
|
||||||
if itr.ActualBitmap() == nil {
|
if itr.ActualBitmap() == nil {
|
||||||
// An empty actual bitmap means the entire AND is empty.
|
// An empty actual bitmap means the entire AND is empty.
|
||||||
oTFR.iterators[i] = segment.AnEmptyPostingsIterator
|
oTFR.iterators[i] = anEmptyPostingsIterator
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -221,14 +221,14 @@ OUTER:
|
||||||
if !bm.Contains(uint32(docNum1HitLast)) {
|
if !bm.Contains(uint32(docNum1HitLast)) {
|
||||||
// The docNum1Hit isn't in one of our actual
|
// The docNum1Hit isn't in one of our actual
|
||||||
// bitmaps, so the entire AND is empty.
|
// bitmaps, so the entire AND is empty.
|
||||||
oTFR.iterators[i] = segment.AnEmptyPostingsIterator
|
oTFR.iterators[i] = anEmptyPostingsIterator
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// The actual bitmaps and docNum1Hits all contain or have
|
// The actual bitmaps and docNum1Hits all contain or have
|
||||||
// the same 1-hit docNum, so that's our AND'ed result.
|
// the same 1-hit docNum, so that's our AND'ed result.
|
||||||
oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFrom1Hit(docNum1HitLast)
|
oTFR.iterators[i] = newUnadornedPostingsIteratorFrom1Hit(docNum1HitLast)
|
||||||
|
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
@ -236,13 +236,13 @@ OUTER:
|
||||||
if len(actualBMs) == 0 {
|
if len(actualBMs) == 0 {
|
||||||
// If we've collected no actual bitmaps at this point,
|
// If we've collected no actual bitmaps at this point,
|
||||||
// then the entire AND is empty.
|
// then the entire AND is empty.
|
||||||
oTFR.iterators[i] = segment.AnEmptyPostingsIterator
|
oTFR.iterators[i] = anEmptyPostingsIterator
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(actualBMs) == 1 {
|
if len(actualBMs) == 1 {
|
||||||
// If we've only 1 actual bitmap, then that's our result.
|
// If we've only 1 actual bitmap, then that's our result.
|
||||||
oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(actualBMs[0])
|
oTFR.iterators[i] = newUnadornedPostingsIteratorFromBitmap(actualBMs[0])
|
||||||
|
|
||||||
continue OUTER
|
continue OUTER
|
||||||
}
|
}
|
||||||
|
@ -254,7 +254,7 @@ OUTER:
|
||||||
bm.And(actualBM)
|
bm.And(actualBM)
|
||||||
}
|
}
|
||||||
|
|
||||||
oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(bm)
|
oTFR.iterators[i] = newUnadornedPostingsIteratorFromBitmap(bm)
|
||||||
}
|
}
|
||||||
|
|
||||||
atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1))
|
atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1))
|
||||||
|
@ -369,7 +369,7 @@ func (o *OptimizeTFRDisjunctionUnadorned) Finish() (rv index.Optimized, err erro
|
||||||
|
|
||||||
bm.AddMany(docNums)
|
bm.AddMany(docNums)
|
||||||
|
|
||||||
oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(bm)
|
oTFR.iterators[i] = newUnadornedPostingsIteratorFromBitmap(bm)
|
||||||
}
|
}
|
||||||
|
|
||||||
atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1))
|
atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1))
|
|
@ -30,8 +30,8 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
bolt "go.etcd.io/bbolt"
|
bolt "go.etcd.io/bbolt"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -429,12 +429,12 @@ func (s *Scorch) persistSnapshotMaybeMerge(snapshot *IndexSnapshot) (
|
||||||
}
|
}
|
||||||
|
|
||||||
func prepareBoltSnapshot(snapshot *IndexSnapshot, tx *bolt.Tx, path string,
|
func prepareBoltSnapshot(snapshot *IndexSnapshot, tx *bolt.Tx, path string,
|
||||||
segPlugin segment.Plugin) ([]string, map[uint64]string, error) {
|
segPlugin SegmentPlugin) ([]string, map[uint64]string, error) {
|
||||||
snapshotsBucket, err := tx.CreateBucketIfNotExists(boltSnapshotsBucket)
|
snapshotsBucket, err := tx.CreateBucketIfNotExists(boltSnapshotsBucket)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
newSnapshotKey := segment.EncodeUvarintAscending(nil, snapshot.epoch)
|
newSnapshotKey := encodeUvarintAscending(nil, snapshot.epoch)
|
||||||
snapshotBucket, err := snapshotsBucket.CreateBucketIfNotExists(newSnapshotKey)
|
snapshotBucket, err := snapshotsBucket.CreateBucketIfNotExists(newSnapshotKey)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
|
@ -474,7 +474,7 @@ func prepareBoltSnapshot(snapshot *IndexSnapshot, tx *bolt.Tx, path string,
|
||||||
|
|
||||||
// first ensure that each segment in this snapshot has been persisted
|
// first ensure that each segment in this snapshot has been persisted
|
||||||
for _, segmentSnapshot := range snapshot.segment {
|
for _, segmentSnapshot := range snapshot.segment {
|
||||||
snapshotSegmentKey := segment.EncodeUvarintAscending(nil, segmentSnapshot.id)
|
snapshotSegmentKey := encodeUvarintAscending(nil, segmentSnapshot.id)
|
||||||
snapshotSegmentBucket, err := snapshotBucket.CreateBucketIfNotExists(snapshotSegmentKey)
|
snapshotSegmentBucket, err := snapshotBucket.CreateBucketIfNotExists(snapshotSegmentKey)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
|
@ -628,7 +628,7 @@ func (s *Scorch) loadFromBolt() error {
|
||||||
foundRoot := false
|
foundRoot := false
|
||||||
c := snapshots.Cursor()
|
c := snapshots.Cursor()
|
||||||
for k, _ := c.Last(); k != nil; k, _ = c.Prev() {
|
for k, _ := c.Last(); k != nil; k, _ = c.Prev() {
|
||||||
_, snapshotEpoch, err := segment.DecodeUvarintAscending(k)
|
_, snapshotEpoch, err := decodeUvarintAscending(k)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("unable to parse segment epoch %x, continuing", k)
|
log.Printf("unable to parse segment epoch %x, continuing", k)
|
||||||
continue
|
continue
|
||||||
|
@ -680,7 +680,7 @@ func (s *Scorch) LoadSnapshot(epoch uint64) (rv *IndexSnapshot, err error) {
|
||||||
if snapshots == nil {
|
if snapshots == nil {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
snapshotKey := segment.EncodeUvarintAscending(nil, epoch)
|
snapshotKey := encodeUvarintAscending(nil, epoch)
|
||||||
snapshot := snapshots.Bucket(snapshotKey)
|
snapshot := snapshots.Bucket(snapshotKey)
|
||||||
if snapshot == nil {
|
if snapshot == nil {
|
||||||
return fmt.Errorf("snapshot with epoch: %v - doesn't exist", epoch)
|
return fmt.Errorf("snapshot with epoch: %v - doesn't exist", epoch)
|
||||||
|
@ -744,7 +744,7 @@ func (s *Scorch) loadSnapshot(snapshot *bolt.Bucket) (*IndexSnapshot, error) {
|
||||||
_ = rv.DecRef()
|
_ = rv.DecRef()
|
||||||
return nil, fmt.Errorf("failed to load segment: %v", err)
|
return nil, fmt.Errorf("failed to load segment: %v", err)
|
||||||
}
|
}
|
||||||
_, segmentSnapshot.id, err = segment.DecodeUvarintAscending(k)
|
_, segmentSnapshot.id, err = decodeUvarintAscending(k)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
_ = rv.DecRef()
|
_ = rv.DecRef()
|
||||||
return nil, fmt.Errorf("failed to decode segment id: %v", err)
|
return nil, fmt.Errorf("failed to decode segment id: %v", err)
|
||||||
|
@ -865,7 +865,7 @@ func (s *Scorch) removeOldBoltSnapshots() (numRemoved int, err error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, epochToRemove := range epochsToRemove {
|
for _, epochToRemove := range epochsToRemove {
|
||||||
k := segment.EncodeUvarintAscending(nil, epochToRemove)
|
k := encodeUvarintAscending(nil, epochToRemove)
|
||||||
err = snapshots.DeleteBucket(k)
|
err = snapshots.DeleteBucket(k)
|
||||||
if err == bolt.ErrBucketNotFound {
|
if err == bolt.ErrBucketNotFound {
|
||||||
err = nil
|
err = nil
|
||||||
|
@ -941,7 +941,7 @@ func (s *Scorch) RootBoltSnapshotEpochs() ([]uint64, error) {
|
||||||
}
|
}
|
||||||
sc := snapshots.Cursor()
|
sc := snapshots.Cursor()
|
||||||
for sk, _ := sc.Last(); sk != nil; sk, _ = sc.Prev() {
|
for sk, _ := sc.Last(); sk != nil; sk, _ = sc.Prev() {
|
||||||
_, snapshotEpoch, err := segment.DecodeUvarintAscending(sk)
|
_, snapshotEpoch, err := decodeUvarintAscending(sk)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
|
@ -1,4 +1,4 @@
|
||||||
// Copyright (c) 2018 Couchbase, Inc.
|
// Copyright (c) 2020 Couchbase, Inc.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -12,7 +12,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package segment
|
package scorch
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"regexp/syntax"
|
"regexp/syntax"
|
||||||
|
@ -20,7 +20,7 @@ import (
|
||||||
"github.com/couchbase/vellum/regexp"
|
"github.com/couchbase/vellum/regexp"
|
||||||
)
|
)
|
||||||
|
|
||||||
func ParseRegexp(pattern string) (a *regexp.Regexp, prefixBeg, prefixEnd []byte, err error) {
|
func parseRegexp(pattern string) (a *regexp.Regexp, prefixBeg, prefixEnd []byte, err error) {
|
||||||
// TODO: potential optimization where syntax.Regexp supports a Simplify() API?
|
// TODO: potential optimization where syntax.Regexp supports a Simplify() API?
|
||||||
|
|
||||||
parsed, err := syntax.Parse(pattern, syntax.Perl)
|
parsed, err := syntax.Parse(pattern, syntax.Perl)
|
||||||
|
@ -33,10 +33,10 @@ func ParseRegexp(pattern string) (a *regexp.Regexp, prefixBeg, prefixEnd []byte,
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
prefix := LiteralPrefix(parsed)
|
prefix := literalPrefix(parsed)
|
||||||
if prefix != "" {
|
if prefix != "" {
|
||||||
prefixBeg := []byte(prefix)
|
prefixBeg := []byte(prefix)
|
||||||
prefixEnd := IncrementBytes(prefixBeg)
|
prefixEnd := calculateExclusiveEndFromPrefix(prefixBeg)
|
||||||
return re, prefixBeg, prefixEnd, nil
|
return re, prefixBeg, prefixEnd, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -44,7 +44,7 @@ func ParseRegexp(pattern string) (a *regexp.Regexp, prefixBeg, prefixEnd []byte,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns the literal prefix given the parse tree for a regexp
|
// Returns the literal prefix given the parse tree for a regexp
|
||||||
func LiteralPrefix(s *syntax.Regexp) string {
|
func literalPrefix(s *syntax.Regexp) string {
|
||||||
// traverse the left-most branch in the parse tree as long as the
|
// traverse the left-most branch in the parse tree as long as the
|
||||||
// node represents a concatenation
|
// node represents a concatenation
|
||||||
for s != nil && s.Op == syntax.OpConcat {
|
for s != nil && s.Op == syntax.OpConcat {
|
||||||
|
@ -61,15 +61,3 @@ func LiteralPrefix(s *syntax.Regexp) string {
|
||||||
|
|
||||||
return "" // no literal prefix
|
return "" // no literal prefix
|
||||||
}
|
}
|
||||||
|
|
||||||
func IncrementBytes(in []byte) []byte {
|
|
||||||
rv := make([]byte, len(in))
|
|
||||||
copy(rv, in)
|
|
||||||
for i := len(rv) - 1; i >= 0; i-- {
|
|
||||||
rv[i] = rv[i] + 1
|
|
||||||
if rv[i] != 0 {
|
|
||||||
return rv // didn't overflow, so stop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil // overflowed
|
|
||||||
}
|
|
|
@ -19,7 +19,6 @@ import (
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
|
||||||
bolt "go.etcd.io/bbolt"
|
bolt "go.etcd.io/bbolt"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -71,7 +70,7 @@ func RollbackPoints(path string) ([]*RollbackPoint, error) {
|
||||||
|
|
||||||
c1 := snapshots.Cursor()
|
c1 := snapshots.Cursor()
|
||||||
for k, _ := c1.Last(); k != nil; k, _ = c1.Prev() {
|
for k, _ := c1.Last(); k != nil; k, _ = c1.Prev() {
|
||||||
_, snapshotEpoch, err := segment.DecodeUvarintAscending(k)
|
_, snapshotEpoch, err := decodeUvarintAscending(k)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("RollbackPoints:"+
|
log.Printf("RollbackPoints:"+
|
||||||
" unable to parse segment epoch %x, continuing", k)
|
" unable to parse segment epoch %x, continuing", k)
|
||||||
|
@ -154,7 +153,7 @@ func Rollback(path string, to *RollbackPoint) error {
|
||||||
}
|
}
|
||||||
sc := snapshots.Cursor()
|
sc := snapshots.Cursor()
|
||||||
for sk, _ := sc.Last(); sk != nil && !found; sk, _ = sc.Prev() {
|
for sk, _ := sc.Last(); sk != nil && !found; sk, _ = sc.Prev() {
|
||||||
_, snapshotEpoch, err := segment.DecodeUvarintAscending(sk)
|
_, snapshotEpoch, err := decodeUvarintAscending(sk)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
@ -195,7 +194,7 @@ func Rollback(path string, to *RollbackPoint) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
for _, epoch := range eligibleEpochs {
|
for _, epoch := range eligibleEpochs {
|
||||||
k := segment.EncodeUvarintAscending(nil, epoch)
|
k := encodeUvarintAscending(nil, epoch)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
|
@ -24,12 +24,9 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/analysis"
|
"github.com/blevesearch/bleve/v2/registry"
|
||||||
"github.com/blevesearch/bleve/document"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
|
||||||
"github.com/blevesearch/bleve/index/store"
|
|
||||||
"github.com/blevesearch/bleve/registry"
|
|
||||||
bolt "go.etcd.io/bbolt"
|
bolt "go.etcd.io/bbolt"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -75,7 +72,7 @@ type Scorch struct {
|
||||||
|
|
||||||
forceMergeRequestCh chan *mergerCtrl
|
forceMergeRequestCh chan *mergerCtrl
|
||||||
|
|
||||||
segPlugin segment.Plugin
|
segPlugin SegmentPlugin
|
||||||
}
|
}
|
||||||
|
|
||||||
type internalStats struct {
|
type internalStats struct {
|
||||||
|
@ -293,7 +290,7 @@ func (s *Scorch) Close() (err error) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Scorch) Update(doc *document.Document) error {
|
func (s *Scorch) Update(doc index.Document) error {
|
||||||
b := index.NewBatch()
|
b := index.NewBatch()
|
||||||
b.Update(doc)
|
b.Update(doc)
|
||||||
return s.Batch(b)
|
return s.Batch(b)
|
||||||
|
@ -313,7 +310,7 @@ func (s *Scorch) Batch(batch *index.Batch) (err error) {
|
||||||
s.fireEvent(EventKindBatchIntroduction, time.Since(start))
|
s.fireEvent(EventKindBatchIntroduction, time.Since(start))
|
||||||
}()
|
}()
|
||||||
|
|
||||||
resultChan := make(chan *index.AnalysisResult, len(batch.IndexOps))
|
resultChan := make(chan index.Document, len(batch.IndexOps))
|
||||||
|
|
||||||
var numUpdates uint64
|
var numUpdates uint64
|
||||||
var numDeletes uint64
|
var numDeletes uint64
|
||||||
|
@ -322,7 +319,7 @@ func (s *Scorch) Batch(batch *index.Batch) (err error) {
|
||||||
for docID, doc := range batch.IndexOps {
|
for docID, doc := range batch.IndexOps {
|
||||||
if doc != nil {
|
if doc != nil {
|
||||||
// insert _id field
|
// insert _id field
|
||||||
doc.AddField(document.NewTextFieldCustom("_id", nil, []byte(doc.ID), document.IndexField|document.StoreField, nil))
|
doc.AddIDField()
|
||||||
numUpdates++
|
numUpdates++
|
||||||
numPlainTextBytes += doc.NumPlainTextBytes()
|
numPlainTextBytes += doc.NumPlainTextBytes()
|
||||||
} else {
|
} else {
|
||||||
|
@ -335,18 +332,21 @@ func (s *Scorch) Batch(batch *index.Batch) (err error) {
|
||||||
|
|
||||||
if numUpdates > 0 {
|
if numUpdates > 0 {
|
||||||
go func() {
|
go func() {
|
||||||
for _, doc := range batch.IndexOps {
|
for k := range batch.IndexOps {
|
||||||
|
doc := batch.IndexOps[k]
|
||||||
if doc != nil {
|
if doc != nil {
|
||||||
aw := index.NewAnalysisWork(s, doc, resultChan)
|
|
||||||
// put the work on the queue
|
// put the work on the queue
|
||||||
s.analysisQueue.Queue(aw)
|
s.analysisQueue.Queue(func() {
|
||||||
|
analyze(doc)
|
||||||
|
resultChan <- doc
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
// wait for analysis result
|
// wait for analysis result
|
||||||
analysisResults := make([]*index.AnalysisResult, int(numUpdates))
|
analysisResults := make([]index.Document, int(numUpdates))
|
||||||
var itemsDeQueued uint64
|
var itemsDeQueued uint64
|
||||||
var totalAnalysisSize int
|
var totalAnalysisSize int
|
||||||
for itemsDeQueued < numUpdates {
|
for itemsDeQueued < numUpdates {
|
||||||
|
@ -566,37 +566,23 @@ func (s *Scorch) StatsMap() map[string]interface{} {
|
||||||
return m
|
return m
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Scorch) Analyze(d *document.Document) *index.AnalysisResult {
|
func (s *Scorch) Analyze(d index.Document) {
|
||||||
return analyze(d)
|
analyze(d)
|
||||||
}
|
}
|
||||||
|
|
||||||
func analyze(d *document.Document) *index.AnalysisResult {
|
func analyze(d index.Document) {
|
||||||
rv := &index.AnalysisResult{
|
d.VisitFields(func(field index.Field) {
|
||||||
Document: d,
|
|
||||||
Analyzed: make([]analysis.TokenFrequencies, len(d.Fields)+len(d.CompositeFields)),
|
|
||||||
Length: make([]int, len(d.Fields)+len(d.CompositeFields)),
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, field := range d.Fields {
|
|
||||||
if field.Options().IsIndexed() {
|
if field.Options().IsIndexed() {
|
||||||
fieldLength, tokenFreqs := field.Analyze()
|
field.Analyze()
|
||||||
rv.Analyzed[i] = tokenFreqs
|
|
||||||
rv.Length[i] = fieldLength
|
|
||||||
|
|
||||||
if len(d.CompositeFields) > 0 && field.Name() != "_id" {
|
if d.HasComposite() && field.Name() != "_id" {
|
||||||
// see if any of the composite fields need this
|
// see if any of the composite fields need this
|
||||||
for _, compositeField := range d.CompositeFields {
|
d.VisitComposite(func(cf index.CompositeField) {
|
||||||
compositeField.Compose(field.Name(), fieldLength, tokenFreqs)
|
cf.Compose(field.Name(), field.AnalyzedLength(), field.AnalyzedTokenFrequencies())
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
}
|
|
||||||
|
|
||||||
return rv
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Scorch) Advanced() (store.KVStore, error) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Scorch) AddEligibleForRemoval(epoch uint64) {
|
func (s *Scorch) AddEligibleForRemoval(epoch uint64) {
|

vendor/github.com/blevesearch/bleve/v2/index/scorch/segment_plugin.go (generated, vendored, new file, 133 lines)
@ -0,0 +1,133 @@
|
||||||
|
// Copyright (c) 2019 Couchbase, Inc.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package scorch
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/RoaringBitmap/roaring"
|
||||||
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
|
|
||||||
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
|
|
||||||
|
zapv11 "github.com/blevesearch/zapx/v11"
|
||||||
|
zapv12 "github.com/blevesearch/zapx/v12"
|
||||||
|
zapv13 "github.com/blevesearch/zapx/v13"
|
||||||
|
zapv14 "github.com/blevesearch/zapx/v14"
|
||||||
|
zapv15 "github.com/blevesearch/zapx/v15"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SegmentPlugin represents the essential functions required by a package to plug in
|
||||||
|
// it's segment implementation
|
||||||
|
type SegmentPlugin interface {
|
||||||
|
|
||||||
|
// Type is the name for this segment plugin
|
||||||
|
Type() string
|
||||||
|
|
||||||
|
// Version is a numeric value identifying a specific version of this type.
|
||||||
|
// When incompatible changes are made to a particular type of plugin, the
|
||||||
|
// version must be incremented.
|
||||||
|
Version() uint32
|
||||||
|
|
||||||
|
// New takes a set of Documents and turns them into a new Segment
|
||||||
|
New(results []index.Document) (segment.Segment, uint64, error)
|
||||||
|
|
||||||
|
// Open attempts to open the file at the specified path and
|
||||||
|
// return the corresponding Segment
|
||||||
|
Open(path string) (segment.Segment, error)
|
||||||
|
|
||||||
|
// Merge takes a set of Segments, and creates a new segment on disk at
|
||||||
|
// the specified path.
|
||||||
|
// Drops is a set of bitmaps (one for each segment) indicating which
|
||||||
|
// documents can be dropped from the segments during the merge.
|
||||||
|
// If the closeCh channel is closed, Merge will cease doing work at
|
||||||
|
// the next opportunity, and return an error (closed).
|
||||||
|
// StatsReporter can optionally be provided, in which case progress
|
||||||
|
// made during the merge is reported while operation continues.
|
||||||
|
// Returns:
|
||||||
|
// A slice of new document numbers (one for each input segment),
|
||||||
|
// this allows the caller to know a particular document's new
|
||||||
|
// document number in the newly merged segment.
|
||||||
|
// The number of bytes written to the new segment file.
|
||||||
|
// An error, if any occurred.
|
||||||
|
Merge(segments []segment.Segment, drops []*roaring.Bitmap, path string,
|
||||||
|
closeCh chan struct{}, s segment.StatsReporter) (
|
||||||
|
[][]uint64, uint64, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
var supportedSegmentPlugins map[string]map[uint32]SegmentPlugin
|
||||||
|
var defaultSegmentPlugin SegmentPlugin
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
ResetSegmentPlugins()
|
||||||
|
RegisterSegmentPlugin(&zapv15.ZapPlugin{}, true)
|
||||||
|
RegisterSegmentPlugin(&zapv14.ZapPlugin{}, false)
|
||||||
|
RegisterSegmentPlugin(&zapv13.ZapPlugin{}, false)
|
||||||
|
RegisterSegmentPlugin(&zapv12.ZapPlugin{}, false)
|
||||||
|
RegisterSegmentPlugin(&zapv11.ZapPlugin{}, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func ResetSegmentPlugins() {
|
||||||
|
supportedSegmentPlugins = map[string]map[uint32]SegmentPlugin{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func RegisterSegmentPlugin(plugin SegmentPlugin, makeDefault bool) {
|
||||||
|
if _, ok := supportedSegmentPlugins[plugin.Type()]; !ok {
|
||||||
|
supportedSegmentPlugins[plugin.Type()] = map[uint32]SegmentPlugin{}
|
||||||
|
}
|
||||||
|
supportedSegmentPlugins[plugin.Type()][plugin.Version()] = plugin
|
||||||
|
if makeDefault {
|
||||||
|
defaultSegmentPlugin = plugin
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func SupportedSegmentTypes() (rv []string) {
|
||||||
|
for k := range supportedSegmentPlugins {
|
||||||
|
rv = append(rv, k)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func SupportedSegmentTypeVersions(typ string) (rv []uint32) {
|
||||||
|
for k := range supportedSegmentPlugins[typ] {
|
||||||
|
rv = append(rv, k)
|
||||||
|
}
|
||||||
|
return rv
|
||||||
|
}
|
||||||
|
|
||||||
|
func chooseSegmentPlugin(forcedSegmentType string,
|
||||||
|
forcedSegmentVersion uint32) (SegmentPlugin, error) {
|
||||||
|
if versions, ok := supportedSegmentPlugins[forcedSegmentType]; ok {
|
||||||
|
if segPlugin, ok := versions[uint32(forcedSegmentVersion)]; ok {
|
||||||
|
return segPlugin, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf(
|
||||||
|
"unsupported version %d for segment type: %s, supported: %v",
|
||||||
|
forcedSegmentVersion, forcedSegmentType,
|
||||||
|
SupportedSegmentTypeVersions(forcedSegmentType))
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unsupported segment type: %s, supported: %v",
|
||||||
|
forcedSegmentType, SupportedSegmentTypes())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Scorch) loadSegmentPlugin(forcedSegmentType string,
|
||||||
|
forcedSegmentVersion uint32) error {
|
||||||
|
segPlugin, err := chooseSegmentPlugin(forcedSegmentType,
|
||||||
|
forcedSegmentVersion)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
s.segPlugin = segPlugin
|
||||||
|
return nil
|
||||||
|
}
|
|
@ -24,9 +24,9 @@ import (
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/document"
|
"github.com/blevesearch/bleve/v2/document"
|
||||||
"github.com/blevesearch/bleve/index"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
"github.com/couchbase/vellum"
|
"github.com/couchbase/vellum"
|
||||||
lev "github.com/couchbase/vellum/levenshtein"
|
lev "github.com/couchbase/vellum/levenshtein"
|
||||||
)
|
)
|
||||||
|
@ -190,21 +190,62 @@ func (i *IndexSnapshot) newIndexSnapshotFieldDict(field string,
|
||||||
|
|
||||||
func (i *IndexSnapshot) FieldDict(field string) (index.FieldDict, error) {
|
func (i *IndexSnapshot) FieldDict(field string) (index.FieldDict, error) {
|
||||||
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
||||||
return i.Iterator()
|
return i.AutomatonIterator(nil, nil, nil)
|
||||||
}, false)
|
}, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// calculateExclusiveEndFromInclusiveEnd produces the next key
|
||||||
|
// when sorting using memcmp style comparisons, suitable to
|
||||||
|
// use as the end key in a traditional (inclusive, exclusive]
|
||||||
|
// start/end range
|
||||||
|
func calculateExclusiveEndFromInclusiveEnd(inclusiveEnd []byte) []byte {
|
||||||
|
rv := inclusiveEnd
|
||||||
|
if len(inclusiveEnd) > 0 {
|
||||||
|
rv = make([]byte, len(inclusiveEnd))
|
||||||
|
copy(rv, inclusiveEnd)
|
||||||
|
if rv[len(rv)-1] < 0xff {
|
||||||
|
// last byte can be incremented by one
|
||||||
|
rv[len(rv)-1]++
|
||||||
|
} else {
|
||||||
|
// last byte is already 0xff, so append 0
|
||||||
|
// next key is simply one byte longer
|
||||||
|
rv = append(rv, 0x0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rv
|
||||||
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) FieldDictRange(field string, startTerm []byte,
|
func (i *IndexSnapshot) FieldDictRange(field string, startTerm []byte,
|
||||||
endTerm []byte) (index.FieldDict, error) {
|
endTerm []byte) (index.FieldDict, error) {
|
||||||
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
||||||
return i.RangeIterator(string(startTerm), string(endTerm))
|
endTermExclusive := calculateExclusiveEndFromInclusiveEnd(endTerm)
|
||||||
|
return i.AutomatonIterator(nil, startTerm, endTermExclusive)
|
||||||
}, false)
|
}, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// calculateExclusiveEndFromPrefix produces the first key that
|
||||||
|
// does not have the same prefix as the input bytes, suitable
|
||||||
|
// to use as the end key in a traditional (inclusive, exclusive]
|
||||||
|
// start/end range
|
||||||
|
func calculateExclusiveEndFromPrefix(in []byte) []byte {
|
||||||
|
rv := make([]byte, len(in))
|
||||||
|
copy(rv, in)
|
||||||
|
for i := len(rv) - 1; i >= 0; i-- {
|
||||||
|
rv[i] = rv[i] + 1
|
||||||
|
if rv[i] != 0 {
|
||||||
|
return rv // didn't overflow, so stop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// all bytes were 0xff, so return nil
|
||||||
|
// as there is no end key for this prefix
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) FieldDictPrefix(field string,
|
func (i *IndexSnapshot) FieldDictPrefix(field string,
|
||||||
termPrefix []byte) (index.FieldDict, error) {
|
termPrefix []byte) (index.FieldDict, error) {
|
||||||
|
termPrefixEnd := calculateExclusiveEndFromPrefix(termPrefix)
|
||||||
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
||||||
return i.PrefixIterator(string(termPrefix))
|
return i.AutomatonIterator(nil, termPrefix, termPrefixEnd)
|
||||||
}, false)
|
}, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -213,7 +254,7 @@ func (i *IndexSnapshot) FieldDictRegexp(field string,
|
||||||
// TODO: potential optimization where the literal prefix represents the,
|
// TODO: potential optimization where the literal prefix represents the,
|
||||||
// entire regexp, allowing us to use PrefixIterator(prefixTerm)?
|
// entire regexp, allowing us to use PrefixIterator(prefixTerm)?
|
||||||
|
|
||||||
a, prefixBeg, prefixEnd, err := segment.ParseRegexp(termRegex)
|
a, prefixBeg, prefixEnd, err := parseRegexp(termRegex)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -243,7 +284,7 @@ func (i *IndexSnapshot) FieldDictFuzzy(field string,
|
||||||
var prefixBeg, prefixEnd []byte
|
var prefixBeg, prefixEnd []byte
|
||||||
if prefix != "" {
|
if prefix != "" {
|
||||||
prefixBeg = []byte(prefix)
|
prefixBeg = []byte(prefix)
|
||||||
prefixEnd = segment.IncrementBytes(prefixBeg)
|
prefixEnd = calculateExclusiveEndFromPrefix(prefixBeg)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
||||||
|
@ -251,13 +292,6 @@ func (i *IndexSnapshot) FieldDictFuzzy(field string,
|
||||||
}, false)
|
}, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) FieldDictOnly(field string,
|
|
||||||
onlyTerms [][]byte, includeCount bool) (index.FieldDict, error) {
|
|
||||||
return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
|
|
||||||
return i.OnlyIterator(onlyTerms, includeCount)
|
|
||||||
}, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IndexSnapshot) FieldDictContains(field string) (index.FieldDictContains, error) {
|
func (i *IndexSnapshot) FieldDictContains(field string) (index.FieldDictContains, error) {
|
||||||
return i.newIndexSnapshotFieldDict(field, nil, true)
|
return i.newIndexSnapshotFieldDict(field, nil, true)
|
||||||
}
|
}
|
||||||
|
@ -349,7 +383,7 @@ func (i *IndexSnapshot) DocCount() (uint64, error) {
|
||||||
return rv, nil
|
return rv, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) Document(id string) (rv *document.Document, err error) {
|
func (i *IndexSnapshot) Document(id string) (rv index.Document, err error) {
|
||||||
// FIXME could be done more efficiently directly, but reusing for simplicity
|
// FIXME could be done more efficiently directly, but reusing for simplicity
|
||||||
tfr, err := i.TermFieldReader([]byte(id), "_id", false, false, false)
|
tfr, err := i.TermFieldReader([]byte(id), "_id", false, false, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -377,7 +411,7 @@ func (i *IndexSnapshot) Document(id string) (rv *document.Document, err error) {
|
||||||
}
|
}
|
||||||
segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)
|
segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)
|
||||||
|
|
||||||
rv = document.NewDocument(id)
|
rvd := document.NewDocument(id)
|
||||||
err = i.segment[segmentIndex].VisitDocument(localDocNum, func(name string, typ byte, val []byte, pos []uint64) bool {
|
err = i.segment[segmentIndex].VisitDocument(localDocNum, func(name string, typ byte, val []byte, pos []uint64) bool {
|
||||||
if name == "_id" {
|
if name == "_id" {
|
||||||
return true
|
return true
|
||||||
|
@ -389,15 +423,15 @@ func (i *IndexSnapshot) Document(id string) (rv *document.Document, err error) {
|
||||||
|
|
||||||
switch typ {
|
switch typ {
|
||||||
case 't':
|
case 't':
|
||||||
rv.AddField(document.NewTextField(name, arrayPos, value))
|
rvd.AddField(document.NewTextField(name, arrayPos, value))
|
||||||
case 'n':
|
case 'n':
|
||||||
rv.AddField(document.NewNumericFieldFromBytes(name, arrayPos, value))
|
rvd.AddField(document.NewNumericFieldFromBytes(name, arrayPos, value))
|
||||||
case 'd':
|
case 'd':
|
||||||
rv.AddField(document.NewDateTimeFieldFromBytes(name, arrayPos, value))
|
rvd.AddField(document.NewDateTimeFieldFromBytes(name, arrayPos, value))
|
||||||
case 'b':
|
case 'b':
|
||||||
rv.AddField(document.NewBooleanFieldFromBytes(name, arrayPos, value))
|
rvd.AddField(document.NewBooleanFieldFromBytes(name, arrayPos, value))
|
||||||
case 'g':
|
case 'g':
|
||||||
rv.AddField(document.NewGeoPointFieldFromBytes(name, arrayPos, value))
|
rvd.AddField(document.NewGeoPointFieldFromBytes(name, arrayPos, value))
|
||||||
}
|
}
|
||||||
|
|
||||||
return true
|
return true
|
||||||
|
@ -406,7 +440,7 @@ func (i *IndexSnapshot) Document(id string) (rv *document.Document, err error) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return rv, nil
|
return rvd, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) segmentIndexAndLocalDocNumFromGlobal(docNum uint64) (int, uint64) {
|
func (i *IndexSnapshot) segmentIndexAndLocalDocNumFromGlobal(docNum uint64) (int, uint64) {
|
||||||
|
@ -563,40 +597,15 @@ func docInternalToNumber(in index.IndexInternalID) (uint64, error) {
|
||||||
return binary.BigEndian.Uint64(in), nil
|
return binary.BigEndian.Uint64(in), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *IndexSnapshot) DocumentVisitFieldTerms(id index.IndexInternalID,
|
|
||||||
fields []string, visitor index.DocumentFieldTermVisitor) error {
|
|
||||||
_, err := i.documentVisitFieldTerms(id, fields, visitor, nil)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IndexSnapshot) documentVisitFieldTerms(id index.IndexInternalID,
|
|
||||||
fields []string, visitor index.DocumentFieldTermVisitor,
|
|
||||||
dvs segment.DocVisitState) (segment.DocVisitState, error) {
|
|
||||||
docNum, err := docInternalToNumber(id)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)
|
|
||||||
if segmentIndex >= len(i.segment) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
_, dvs, err = i.documentVisitFieldTermsOnSegment(
|
|
||||||
segmentIndex, localDocNum, fields, nil, visitor, dvs)
|
|
||||||
|
|
||||||
return dvs, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *IndexSnapshot) documentVisitFieldTermsOnSegment(
|
func (i *IndexSnapshot) documentVisitFieldTermsOnSegment(
|
||||||
segmentIndex int, localDocNum uint64, fields []string, cFields []string,
|
segmentIndex int, localDocNum uint64, fields []string, cFields []string,
|
||||||
visitor index.DocumentFieldTermVisitor, dvs segment.DocVisitState) (
|
visitor index.DocValueVisitor, dvs segment.DocVisitState) (
|
||||||
cFieldsOut []string, dvsOut segment.DocVisitState, err error) {
|
cFieldsOut []string, dvsOut segment.DocVisitState, err error) {
|
||||||
ss := i.segment[segmentIndex]
|
ss := i.segment[segmentIndex]
|
||||||
|
|
||||||
var vFields []string // fields that are visitable via the segment
|
var vFields []string // fields that are visitable via the segment
|
||||||
|
|
||||||
ssv, ssvOk := ss.segment.(segment.DocumentFieldTermVisitable)
|
ssv, ssvOk := ss.segment.(segment.DocValueVisitable)
|
||||||
if ssvOk && ssv != nil {
|
if ssvOk && ssv != nil {
|
||||||
vFields, err = ssv.VisitableDocValueFields()
|
vFields, err = ssv.VisitableDocValueFields()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -627,7 +636,7 @@ func (i *IndexSnapshot) documentVisitFieldTermsOnSegment(
|
||||||
}
|
}
|
||||||
|
|
||||||
if ssvOk && ssv != nil && len(vFields) > 0 {
|
if ssvOk && ssv != nil && len(vFields) > 0 {
|
||||||
dvs, err = ssv.VisitDocumentFieldTerms(localDocNum, fields, visitor, dvs)
|
dvs, err = ssv.VisitDocValues(localDocNum, fields, visitor, dvs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
|
@ -662,7 +671,7 @@ type DocValueReader struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID,
|
func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID,
|
||||||
visitor index.DocumentFieldTermVisitor) (err error) {
|
visitor index.DocValueVisitor) (err error) {
|
||||||
docNum, err := docInternalToNumber(id)
|
docNum, err := docInternalToNumber(id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
|
@ -17,8 +17,8 @@ package scorch
|
||||||
import (
|
import (
|
||||||
"container/heap"
|
"container/heap"
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/index"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
type segmentDictCursor struct {
|
type segmentDictCursor struct {
|
|
@ -19,8 +19,8 @@ import (
|
||||||
"reflect"
|
"reflect"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index"
|
"github.com/blevesearch/bleve/v2/size"
|
||||||
"github.com/blevesearch/bleve/size"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
var reflectStaticSizeIndexSnapshotDocIDReader int
|
var reflectStaticSizeIndexSnapshotDocIDReader int
|
|
@ -20,9 +20,9 @@ import (
|
||||||
"reflect"
|
"reflect"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/blevesearch/bleve/index"
|
"github.com/blevesearch/bleve/v2/size"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/size"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
var reflectStaticSizeIndexSnapshotTermFieldReader int
|
var reflectStaticSizeIndexSnapshotTermFieldReader int
|
|
@ -20,9 +20,9 @@ import (
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/RoaringBitmap/roaring"
|
"github.com/RoaringBitmap/roaring"
|
||||||
"github.com/blevesearch/bleve/index"
|
"github.com/blevesearch/bleve/v2/size"
|
||||||
"github.com/blevesearch/bleve/index/scorch/segment"
|
index "github.com/blevesearch/bleve_index_api"
|
||||||
"github.com/blevesearch/bleve/size"
|
segment "github.com/blevesearch/scorch_segment_api"
|
||||||
)
|
)
|
||||||
|
|
||||||
var TermSeparator byte = 0xff
|
var TermSeparator byte = 0xff
|
||||||
|
@ -62,8 +62,8 @@ func (s *SegmentSnapshot) Close() error {
|
||||||
return s.segment.Close()
|
return s.segment.Close()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SegmentSnapshot) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error {
|
func (s *SegmentSnapshot) VisitDocument(num uint64, visitor segment.StoredFieldValueVisitor) error {
|
||||||
return s.segment.VisitDocument(num, visitor)
|
return s.segment.VisitStoredFields(num, visitor)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SegmentSnapshot) DocID(num uint64) ([]byte, error) {
|
func (s *SegmentSnapshot) DocID(num uint64) ([]byte, error) {
|
||||||
|
@ -147,7 +147,7 @@ func (cfd *cachedFieldDocs) prepareField(field string, ss *SegmentSnapshot) {
|
||||||
var postings segment.PostingsList
|
var postings segment.PostingsList
|
||||||
var postingsItr segment.PostingsIterator
|
var postingsItr segment.PostingsIterator
|
||||||
|
|
||||||
dictItr := dict.Iterator()
|
dictItr := dict.AutomatonIterator(nil, nil, nil)
|
||||||
next, err := dictItr.Next()
|
next, err := dictItr.Next()
|
||||||
for err == nil && next != nil {
|
for err == nil && next != nil {
|
||||||
var err1 error
|
var err1 error
|
||||||
|
@ -253,7 +253,7 @@ func (c *cachedDocs) updateSizeLOCKED() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *cachedDocs) visitDoc(localDocNum uint64,
|
func (c *cachedDocs) visitDoc(localDocNum uint64,
|
||||||
fields []string, visitor index.DocumentFieldTermVisitor) {
|
fields []string, visitor index.DocValueVisitor) {
|
||||||
c.m.Lock()
|
c.m.Lock()
|
||||||
|
|
||||||
for _, field := range fields {
|
for _, field := range fields {
|
|
@ -12,10 +12,11 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.

-package segment
+package scorch

import (
	"github.com/RoaringBitmap/roaring"
+	segment "github.com/blevesearch/scorch_segment_api"
	"math"
	"reflect"
)
@@ -25,28 +26,28 @@ var reflectStaticSizeUnadornedPostingsIterator1Hit int
var reflectStaticSizeUnadornedPosting int

func init() {
-	var pib UnadornedPostingsIteratorBitmap
+	var pib unadornedPostingsIteratorBitmap
	reflectStaticSizeUnadornedPostingsIteratorBitmap = int(reflect.TypeOf(pib).Size())
-	var pi1h UnadornedPostingsIterator1Hit
+	var pi1h unadornedPostingsIterator1Hit
	reflectStaticSizeUnadornedPostingsIterator1Hit = int(reflect.TypeOf(pi1h).Size())
	var up UnadornedPosting
	reflectStaticSizeUnadornedPosting = int(reflect.TypeOf(up).Size())
}

-type UnadornedPostingsIteratorBitmap struct {
+type unadornedPostingsIteratorBitmap struct {
	actual   roaring.IntPeekable
	actualBM *roaring.Bitmap
}

-func (i *UnadornedPostingsIteratorBitmap) Next() (Posting, error) {
+func (i *unadornedPostingsIteratorBitmap) Next() (segment.Posting, error) {
	return i.nextAtOrAfter(0)
}

-func (i *UnadornedPostingsIteratorBitmap) Advance(docNum uint64) (Posting, error) {
+func (i *unadornedPostingsIteratorBitmap) Advance(docNum uint64) (segment.Posting, error) {
	return i.nextAtOrAfter(docNum)
}

-func (i *UnadornedPostingsIteratorBitmap) nextAtOrAfter(atOrAfter uint64) (Posting, error) {
+func (i *unadornedPostingsIteratorBitmap) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) {
	docNum, exists := i.nextDocNumAtOrAfter(atOrAfter)
	if !exists {
		return nil, nil
@@ -54,7 +55,7 @@ func (i *UnadornedPostingsIteratorBitmap) nextAtOrAfter(atOrAfter uint64) (Posti
	return UnadornedPosting(docNum), nil
}

-func (i *UnadornedPostingsIteratorBitmap) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) {
+func (i *unadornedPostingsIteratorBitmap) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) {
	if i.actual == nil || !i.actual.HasNext() {
		return 0, false
	}
@@ -67,25 +68,25 @@ func (i *UnadornedPostingsIteratorBitmap) nextDocNumAtOrAfter(atOrAfter uint64)
	return uint64(i.actual.Next()), true
}

-func (i *UnadornedPostingsIteratorBitmap) Size() int {
+func (i *unadornedPostingsIteratorBitmap) Size() int {
	return reflectStaticSizeUnadornedPostingsIteratorBitmap
}

-func (i *UnadornedPostingsIteratorBitmap) ActualBitmap() *roaring.Bitmap {
+func (i *unadornedPostingsIteratorBitmap) ActualBitmap() *roaring.Bitmap {
	return i.actualBM
}

-func (i *UnadornedPostingsIteratorBitmap) DocNum1Hit() (uint64, bool) {
+func (i *unadornedPostingsIteratorBitmap) DocNum1Hit() (uint64, bool) {
	return 0, false
}

-func (i *UnadornedPostingsIteratorBitmap) ReplaceActual(actual *roaring.Bitmap) {
+func (i *unadornedPostingsIteratorBitmap) ReplaceActual(actual *roaring.Bitmap) {
	i.actualBM = actual
	i.actual = actual.Iterator()
}

-func NewUnadornedPostingsIteratorFromBitmap(bm *roaring.Bitmap) PostingsIterator {
-	return &UnadornedPostingsIteratorBitmap{
+func newUnadornedPostingsIteratorFromBitmap(bm *roaring.Bitmap) segment.PostingsIterator {
+	return &unadornedPostingsIteratorBitmap{
		actualBM: bm,
		actual:   bm.Iterator(),
	}
@@ -93,19 +94,19 @@ func NewUnadornedPostingsIteratorFromBitmap(bm *roaring.Bitmap) PostingsIterator

const docNum1HitFinished = math.MaxUint64

-type UnadornedPostingsIterator1Hit struct {
+type unadornedPostingsIterator1Hit struct {
	docNum uint64
}

-func (i *UnadornedPostingsIterator1Hit) Next() (Posting, error) {
+func (i *unadornedPostingsIterator1Hit) Next() (segment.Posting, error) {
	return i.nextAtOrAfter(0)
}

-func (i *UnadornedPostingsIterator1Hit) Advance(docNum uint64) (Posting, error) {
+func (i *unadornedPostingsIterator1Hit) Advance(docNum uint64) (segment.Posting, error) {
	return i.nextAtOrAfter(docNum)
}

-func (i *UnadornedPostingsIterator1Hit) nextAtOrAfter(atOrAfter uint64) (Posting, error) {
+func (i *unadornedPostingsIterator1Hit) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) {
	docNum, exists := i.nextDocNumAtOrAfter(atOrAfter)
	if !exists {
		return nil, nil
@@ -113,7 +114,7 @@ func (i *UnadornedPostingsIterator1Hit) nextAtOrAfter(atOrAfter uint64) (Posting
	return UnadornedPosting(docNum), nil
}

-func (i *UnadornedPostingsIterator1Hit) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) {
+func (i *unadornedPostingsIterator1Hit) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) {
	if i.docNum == docNum1HitFinished {
		return 0, false
	}
@@ -127,12 +128,12 @@ func (i *UnadornedPostingsIterator1Hit) nextDocNumAtOrAfter(atOrAfter uint64) (u
	return docNum, true
}

-func (i *UnadornedPostingsIterator1Hit) Size() int {
+func (i *unadornedPostingsIterator1Hit) Size() int {
	return reflectStaticSizeUnadornedPostingsIterator1Hit
}

-func NewUnadornedPostingsIteratorFrom1Hit(docNum1Hit uint64) PostingsIterator {
-	return &UnadornedPostingsIterator1Hit{
+func newUnadornedPostingsIteratorFrom1Hit(docNum1Hit uint64) segment.PostingsIterator {
+	return &unadornedPostingsIterator1Hit{
		docNum1Hit,
	}
}
@@ -151,7 +152,7 @@ func (p UnadornedPosting) Norm() float64 {
	return 0
}

-func (p UnadornedPosting) Locations() []Location {
+func (p UnadornedPosting) Locations() []segment.Location {
	return nil
}
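The bitmap iterator above is a thin wrapper over roaring's IntPeekable, and its nextDocNumAtOrAfter is essentially AdvanceIfNeeded followed by Next. A minimal standalone sketch of that skip-ahead pattern, assuming only the RoaringBitmap/roaring package from this diff (the helper name nextAtOrAfter and the sample document numbers are illustrative, not part of the change):

package main

import (
	"fmt"

	"github.com/RoaringBitmap/roaring"
)

// nextAtOrAfter advances the peekable iterator to the first value >= atOrAfter,
// mirroring the iterator logic in the hunk above.
func nextAtOrAfter(it roaring.IntPeekable, atOrAfter uint32) (uint32, bool) {
	if it == nil || !it.HasNext() {
		return 0, false
	}
	it.AdvanceIfNeeded(atOrAfter)
	if !it.HasNext() {
		return 0, false
	}
	return it.Next(), true
}

func main() {
	bm := roaring.BitmapOf(2, 5, 9, 42)
	it := bm.Iterator()
	if doc, ok := nextAtOrAfter(it, 6); ok {
		fmt.Println("first doc at or after 6:", doc) // prints 9
	}
}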
@@ -15,29 +15,46 @@
package upsidedown

import (
-	"github.com/blevesearch/bleve/analysis"
-	"github.com/blevesearch/bleve/document"
-	"github.com/blevesearch/bleve/index"
+	index "github.com/blevesearch/bleve_index_api"
)

-func (udc *UpsideDownCouch) Analyze(d *document.Document) *index.AnalysisResult {
-	rv := &index.AnalysisResult{
-		DocID: d.ID,
-		Rows: make([]index.IndexRow, 0, 100),
-	}
-
-	docIDBytes := []byte(d.ID)
+type IndexRow interface {
+	KeySize() int
+	KeyTo([]byte) (int, error)
+	Key() []byte
+
+	ValueSize() int
+	ValueTo([]byte) (int, error)
+	Value() []byte
+}
+
+type AnalysisResult struct {
+	DocID string
+	Rows  []IndexRow
+}
+
+func (udc *UpsideDownCouch) Analyze(d index.Document) *AnalysisResult {
+	return udc.analyze(d)
+}
+
+func (udc *UpsideDownCouch) analyze(d index.Document) *AnalysisResult {
+	rv := &AnalysisResult{
+		DocID: d.ID(),
+		Rows: make([]IndexRow, 0, 100),
+	}
+
+	docIDBytes := []byte(d.ID())

	// track our back index entries
	backIndexStoredEntries := make([]*BackIndexStoreEntry, 0)

	// information we collate as we merge fields with same name
-	fieldTermFreqs := make(map[uint16]analysis.TokenFrequencies)
+	fieldTermFreqs := make(map[uint16]index.TokenFrequencies)
	fieldLengths := make(map[uint16]int)
	fieldIncludeTermVectors := make(map[uint16]bool)
	fieldNames := make(map[uint16]string)

-	analyzeField := func(field document.Field, storable bool) {
+	analyzeField := func(field index.Field, storable bool) {
		fieldIndex, newFieldRow := udc.fieldIndexOrNewRow(field.Name())
		if newFieldRow != nil {
			rv.Rows = append(rv.Rows, newFieldRow)
@@ -45,7 +62,9 @@ func (udc *UpsideDownCouch) Analyze(d *document.Document) *index.AnalysisResult
		fieldNames[fieldIndex] = field.Name()

		if field.Options().IsIndexed() {
-			fieldLength, tokenFreqs := field.Analyze()
+			field.Analyze()
+			fieldLength := field.AnalyzedLength()
+			tokenFreqs := field.AnalyzedTokenFrequencies()
			existingFreqs := fieldTermFreqs[fieldIndex]
			if existingFreqs == nil {
				fieldTermFreqs[fieldIndex] = tokenFreqs
@@ -66,21 +85,21 @@ func (udc *UpsideDownCouch) Analyze(d *document.Document) *index.AnalysisResult
	// place information about indexed fields into map
	// this collates information across fields with
	// same names (arrays)
-	for _, field := range d.Fields {
+	d.VisitFields(func(field index.Field) {
		analyzeField(field, true)
-	}
+	})

-	if len(d.CompositeFields) > 0 {
+	if d.HasComposite() {
		for fieldIndex, tokenFreqs := range fieldTermFreqs {
			// see if any of the composite fields need this
-			for _, compositeField := range d.CompositeFields {
-				compositeField.Compose(fieldNames[fieldIndex], fieldLengths[fieldIndex], tokenFreqs)
-			}
+			d.VisitComposite(func(field index.CompositeField) {
+				field.Compose(fieldNames[fieldIndex], fieldLengths[fieldIndex], tokenFreqs)
+			})
		}

-		for _, compositeField := range d.CompositeFields {
-			analyzeField(compositeField, false)
-		}
+		d.VisitComposite(func(field index.CompositeField) {
+			analyzeField(field, false)
+		})
	}

	rowsCapNeeded := len(rv.Rows) + 1
@@ -88,7 +107,7 @@ func (udc *UpsideDownCouch) Analyze(d *document.Document) *index.AnalysisResult
		rowsCapNeeded += len(tokenFreqs)
	}

-	rv.Rows = append(make([]index.IndexRow, 0, rowsCapNeeded), rv.Rows...)
+	rv.Rows = append(make([]IndexRow, 0, rowsCapNeeded), rv.Rows...)

	backIndexTermsEntries := make([]*BackIndexTermsEntry, 0, len(fieldTermFreqs))
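The hunks above replace direct iteration over d.Fields and d.CompositeFields with the VisitFields/VisitComposite callbacks of the new bleve_index_api document interface. A minimal, self-contained sketch of that visitor style, assuming nothing beyond the standard library (the Field and Document types below are illustrative stand-ins, not the real bleve_index_api definitions):

package main

import "fmt"

// Field is an illustrative stand-in for the field interface; only the
// visitor shape matters here.
type Field interface {
	Name() string
	Value() []byte
}

type simpleField struct {
	name  string
	value []byte
}

func (f simpleField) Name() string  { return f.name }
func (f simpleField) Value() []byte { return f.value }

// Document hides its field slice and hands each field to a callback,
// which is the shape the upgraded API uses.
type Document struct {
	fields []Field
}

func (d *Document) VisitFields(visitor func(Field)) {
	for _, f := range d.fields {
		visitor(f)
	}
}

func main() {
	doc := &Document{fields: []Field{
		simpleField{"title", []byte("bleve v2")},
		simpleField{"body", []byte("upgrade notes")},
	}}
	doc.VisitFields(func(f Field) {
		fmt.Printf("%s: %s\n", f.Name(), f.Value())
	})
}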
@@ -18,7 +18,7 @@ import (
	"bytes"
	"sort"

-	"github.com/blevesearch/bleve/index/store"
+	"github.com/blevesearch/upsidedown_store_api"
)

// the functions in this file are only intended to be used by

@@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package index
+package upsidedown

import (
	"sync"

@@ -17,8 +17,8 @@ package upsidedown
import (
	"fmt"

-	"github.com/blevesearch/bleve/index"
-	"github.com/blevesearch/bleve/index/store"
+	index "github.com/blevesearch/bleve_index_api"
+	store "github.com/blevesearch/upsidedown_store_api"
)

type UpsideDownCouchFieldDict struct {
@@ -17,9 +17,9 @@ package upsidedown
import (
	"reflect"

-	"github.com/blevesearch/bleve/document"
-	"github.com/blevesearch/bleve/index"
-	"github.com/blevesearch/bleve/index/store"
+	"github.com/blevesearch/bleve/v2/document"
+	index "github.com/blevesearch/bleve_index_api"
+	"github.com/blevesearch/upsidedown_store_api"
)

var reflectStaticSizeIndexReader int
@@ -67,7 +67,7 @@ func (i *IndexReader) DocIDReaderOnly(ids []string) (index.DocIDReader, error) {
	return newUpsideDownCouchDocIDReaderOnly(i, ids)
}

-func (i *IndexReader) Document(id string) (doc *document.Document, err error) {
+func (i *IndexReader) Document(id string) (doc index.Document, err error) {
	// first hit the back index to confirm doc exists
	var backIndexRow *BackIndexRow
	backIndexRow, err = backIndexRowForDoc(i.kvreader, []byte(id))
@@ -77,7 +77,7 @@ func (i *IndexReader) Document(id string) (doc *document.Document, err error) {
	if backIndexRow == nil {
		return
	}
-	doc = document.NewDocument(id)
+	rvd := document.NewDocument(id)
	storedRow := NewStoredRow([]byte(id), 0, []uint64{}, 'x', nil)
	storedRowScanPrefix := storedRow.ScanPrefixForDoc()
	it := i.kvreader.PrefixIterator(storedRowScanPrefix)
@@ -93,24 +93,23 @@ func (i *IndexReader) Document(id string) (doc *document.Document, err error) {
		var row *StoredRow
		row, err = NewStoredRowKV(key, safeVal)
		if err != nil {
-			doc = nil
-			return
+			return nil, err
		}
		if row != nil {
			fieldName := i.index.fieldCache.FieldIndexed(row.field)
			field := decodeFieldType(row.typ, fieldName, row.arrayPositions, row.value)
			if field != nil {
-				doc.AddField(field)
+				rvd.AddField(field)
			}
		}

		it.Next()
		key, val, valid = it.Current()
	}
-	return
+	return rvd, nil
}

-func (i *IndexReader) DocumentVisitFieldTerms(id index.IndexInternalID, fields []string, visitor index.DocumentFieldTermVisitor) error {
+func (i *IndexReader) documentVisitFieldTerms(id index.IndexInternalID, fields []string, visitor index.DocValueVisitor) error {
	fieldsMap := make(map[uint16]string, len(fields))
	for _, f := range fields {
		id, ok := i.index.fieldCache.FieldNamed(f, false)
@@ -221,6 +220,6 @@ type DocValueReader struct {
}

func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID,
-	visitor index.DocumentFieldTermVisitor) error {
-	return dvr.i.DocumentVisitFieldTerms(id, dvr.fields, visitor)
+	visitor index.DocValueVisitor) error {
+	return dvr.i.documentVisitFieldTerms(id, dvr.fields, visitor)
}
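Besides swapping the return type to index.Document, the Document hunk above also trades the old "reset the named return and bare return" error path (doc = nil; return) for an explicit return nil, err. A small hedged sketch of the two styles side by side, assuming nothing from bleve (the loadNamed/loadExplicit names and errBadRow are hypothetical):

package main

import (
	"errors"
	"fmt"
)

var errBadRow = errors.New("bad row")

// loadNamed uses a named return and a bare return, the older style:
// the reader has to track what doc and err currently hold.
func loadNamed(fail bool) (doc *string, err error) {
	if fail {
		doc = nil
		err = errBadRow
		return
	}
	s := "ok"
	return &s, nil
}

// loadExplicit returns its values explicitly, the style the diff moves to.
func loadExplicit(fail bool) (*string, error) {
	if fail {
		return nil, errBadRow
	}
	s := "ok"
	return &s, nil
}

func main() {
	if _, err := loadNamed(true); err != nil {
		fmt.Println("named:", err)
	}
	if _, err := loadExplicit(true); err != nil {
		fmt.Println("explicit:", err)
	}
}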
@@ -20,9 +20,9 @@ import (
	"sort"
	"sync/atomic"

-	"github.com/blevesearch/bleve/index"
-	"github.com/blevesearch/bleve/index/store"
-	"github.com/blevesearch/bleve/size"
+	"github.com/blevesearch/bleve/v2/size"
+	index "github.com/blevesearch/bleve_index_api"
+	"github.com/blevesearch/upsidedown_store_api"
)

var reflectStaticSizeUpsideDownCouchTermFieldReader int

@@ -22,7 +22,7 @@ import (
	"math"
	"reflect"

-	"github.com/blevesearch/bleve/size"
+	"github.com/blevesearch/bleve/v2/size"
	"github.com/golang/protobuf/proto"
)

@@ -18,7 +18,7 @@ import (
	"encoding/json"
	"sync/atomic"

-	"github.com/blevesearch/bleve/index/store"
+	"github.com/blevesearch/upsidedown_store_api"
)

type indexStat struct {
@@ -15,7 +15,7 @@
package boltdb

import (
-	"github.com/blevesearch/bleve/index/store"
+	store "github.com/blevesearch/upsidedown_store_api"
	bolt "go.etcd.io/bbolt"
)

@@ -28,8 +28,8 @@ import (
	"fmt"
	"os"

-	"github.com/blevesearch/bleve/index/store"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/registry"
+	store "github.com/blevesearch/upsidedown_store_api"
	bolt "go.etcd.io/bbolt"
)

@@ -17,7 +17,7 @@ package boltdb
import (
	"fmt"

-	"github.com/blevesearch/bleve/index/store"
+	store "github.com/blevesearch/upsidedown_store_api"
)

type Writer struct {

@@ -18,7 +18,7 @@
package gtreap

import (
-	"github.com/blevesearch/bleve/index/store"
+	"github.com/blevesearch/upsidedown_store_api"

	"github.com/steveyen/gtreap"
)

@@ -24,8 +24,8 @@ import (
	"os"
	"sync"

-	"github.com/blevesearch/bleve/index/store"
-	"github.com/blevesearch/bleve/registry"
+	"github.com/blevesearch/bleve/v2/registry"
+	"github.com/blevesearch/upsidedown_store_api"
	"github.com/steveyen/gtreap"
)

@@ -21,7 +21,7 @@ import (
	"fmt"
	"math/rand"

-	"github.com/blevesearch/bleve/index/store"
+	"github.com/blevesearch/upsidedown_store_api"
)

type Writer struct {
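Taken together, these hunks track the split of bleve into the github.com/blevesearch/bleve/v2 module plus the separate bleve_index_api, scorch_segment_api, and upsidedown_store_api modules. For reference, a minimal hedged sketch of what a consumer of the upgraded dependency looks like through the v2 import path (the index location "example.bleve" and the sample document are made up for illustration):

package main

import (
	"fmt"
	"log"

	"github.com/blevesearch/bleve/v2"
)

func main() {
	// Create an on-disk index through the v2 module path.
	mapping := bleve.NewIndexMapping()
	idx, err := bleve.New("example.bleve", mapping)
	if err != nil {
		log.Fatal(err)
	}
	defer idx.Close()

	// Index a document, then run a simple match query against it.
	if err := idx.Index("doc1", map[string]string{"body": "upgrade bleve to v2"}); err != nil {
		log.Fatal(err)
	}
	query := bleve.NewMatchQuery("upgrade")
	result, err := idx.Search(bleve.NewSearchRequest(query))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Total, "hit(s)")
}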
Some files were not shown because too many files have changed in this diff.