diff --git a/cmd/query/main.go b/cmd/query/main.go
index 1d8c243..6351cc6 100755
--- a/cmd/query/main.go
+++ b/cmd/query/main.go
@@ -8,12 +8,13 @@ import (
 	"context"
 	"flag"
 	"fmt"
-	"github.com/aaronland/go-sqlite/v2"
-	"github.com/aaronland/go-sqlite/v2/query"
-	"github.com/whosonfirst/go-whosonfirst-sqlite-features/v2/flags"
 	"log"
 	"os"
 	"strings"
+
+	"github.com/aaronland/go-sqlite/v2"
+	"github.com/aaronland/go-sqlite/v2/query"
+	"github.com/whosonfirst/go-whosonfirst-sqlite-features/v2/flags"
 )
 
 func main() {
diff --git a/flags/flags.go b/flags/flags.go
index bcaa561..d50f506 100644
--- a/flags/flags.go
+++ b/flags/flags.go
@@ -3,9 +3,10 @@ package flags
 import (
 	"fmt"
-	"github.com/whosonfirst/go-whosonfirst-flags/existential"
 	"strconv"
 	"strings"
+
+	"github.com/whosonfirst/go-whosonfirst-flags/existential"
 )
 
 // ExistentialFlagsToQueryConditions converts 'flag_label' and 'str_labels' in a SQL query and its query arguments.
diff --git a/go.mod b/go.mod
index 92ff6c3..9c55592 100644
--- a/go.mod
+++ b/go.mod
@@ -3,40 +3,42 @@ module github.com/whosonfirst/go-whosonfirst-sqlite-features/v2
 go 1.18
 
 require (
-	github.com/aaronland/go-sqlite-modernc v0.0.1
+	github.com/aaronland/go-sqlite-modernc v0.0.3
 	github.com/aaronland/go-sqlite/v2 v2.2.0
-	github.com/paulmach/orb v0.7.1
-	github.com/tidwall/gjson v1.14.4
-	github.com/whosonfirst/go-whosonfirst-feature v0.0.25
-	github.com/whosonfirst/go-whosonfirst-flags v0.4.4
+	github.com/paulmach/orb v0.11.1
+	github.com/tidwall/gjson v1.17.0
+	github.com/whosonfirst/go-whosonfirst-feature v0.0.27
+	github.com/whosonfirst/go-whosonfirst-flags v0.5.1
 	github.com/whosonfirst/go-whosonfirst-names v0.1.0
-	github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.3
+	github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.7
 )
 
 require (
 	github.com/aaronland/go-roster v1.0.0 // indirect
+	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/google/uuid v1.3.0 // indirect
 	github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
 	github.com/mattn/go-isatty v0.0.16 // indirect
-	github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect
+	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
 	github.com/sfomuseum/go-edtf v1.1.1 // indirect
 	github.com/tidwall/match v1.1.1 // indirect
 	github.com/tidwall/pretty v1.2.0 // indirect
 	github.com/whosonfirst/go-rfc-5646 v0.1.0 // indirect
 	github.com/whosonfirst/go-whosonfirst-sources v0.1.0 // indirect
-	github.com/whosonfirst/go-whosonfirst-uri v1.2.0 // indirect
+	github.com/whosonfirst/go-whosonfirst-uri v1.3.0 // indirect
+	go.mongodb.org/mongo-driver v1.11.4 // indirect
 	golang.org/x/mod v0.3.0 // indirect
-	golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab // indirect
+	golang.org/x/sys v0.9.0 // indirect
 	golang.org/x/tools v0.0.0-20210106214847-113979e3529a // indirect
 	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
 	lukechampine.com/uint128 v1.2.0 // indirect
 	modernc.org/cc/v3 v3.40.0 // indirect
 	modernc.org/ccgo/v3 v3.16.13 // indirect
-	modernc.org/libc v1.21.5 // indirect
-	modernc.org/mathutil v1.5.0 // indirect
-	modernc.org/memory v1.4.0 // indirect
+	modernc.org/libc v1.29.0 // indirect
+	modernc.org/mathutil v1.6.0 // indirect
+	modernc.org/memory v1.7.2 // indirect
 	modernc.org/opt v0.1.3 // indirect
-	modernc.org/sqlite v1.20.0 // indirect
+	modernc.org/sqlite v1.28.0 // indirect
 	modernc.org/strutil v1.1.3 // indirect
 	modernc.org/token v1.0.1 // indirect
 )
diff --git a/go.sum b/go.sum index 530aed9..44bfe0b 100644 ---
a/go.sum +++ b/go.sum @@ -1,63 +1,81 @@ github.com/aaronland/go-roster v1.0.0 h1:FRDGrTqsYySKjWnAhbBGXyeGlI/o5/t9FZYCbUmyQtI= github.com/aaronland/go-roster v1.0.0/go.mod h1:KIsYZgrJlAsyb9LsXSCvlqvbcCBVjCSqcQiZx42i9ro= -github.com/aaronland/go-sqlite-modernc v0.0.1 h1:XvHhXFN+t7dkbddg8Qeuo5Vewme8O56vtdGhp2APNSI= -github.com/aaronland/go-sqlite-modernc v0.0.1/go.mod h1:nOY4U91arOGKkgc2KFn0VBsKQPy7Si4d/H0P6zuiALk= +github.com/aaronland/go-sqlite-modernc v0.0.3 h1:jCXzkZSLUgYbEY5FJluQY/x7hEfeYJ0LYxFYbTPTdXU= +github.com/aaronland/go-sqlite-modernc v0.0.3/go.mod h1:utUqGH4Wu7rD7BYZ7dIg35MwmTuHziIUWkEw1mdjbGw= github.com/aaronland/go-sqlite/v2 v2.2.0 h1:Efase4L+f5LgWYEHiA/dUwEuNzJgJdjOVElHMvGEZRw= github.com/aaronland/go-sqlite/v2 v2.2.0/go.mod h1:CI0V49+b2LZvf27tSFPJ9jw6F594gHxrtVt+OdMT3TU= -github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-multierror v0.0.0-20171204182908-b7773ae21874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= -github.com/paulmach/orb v0.7.1 h1:Zha++Z5OX/l168sqHK3k4z18LDvr+YAO/VjK0ReQ9rU= -github.com/paulmach/orb v0.7.1/go.mod h1:FWRlTgl88VI1RBx/MkrwWDRhQ96ctqMCh8boXhmqB/A= 
+github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU= +github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU= github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/sfomuseum/go-edtf v1.1.1 h1:R5gElndHGDaK/rGSh2X+ulaLtlcHCdQA1cTzB8e9wv8= github.com/sfomuseum/go-edtf v1.1.1/go.mod h1:1rP0EJZ/84j3HO80vGcnG2T9MFBDAFyTNtjrr8cv3T4= -github.com/sfomuseum/go-flags v0.7.0/go.mod h1:ML3DTNbF9xnjExSdS/9FtVLjIUhRU5gm/ehzISv+t2w= -github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM= -github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= +github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/whosonfirst/go-rfc-5646 v0.1.0 h1:HNFPAem6v5De61PXLgbGzx9tfNOP83AAkVvm9WAddJY= github.com/whosonfirst/go-rfc-5646 v0.1.0/go.mod h1:JZj//FV9YeV3fkyOY/82V53EMLQXwRwNPuQIGs8BUmo= -github.com/whosonfirst/go-whosonfirst-feature v0.0.25 h1:6s2to+aWB9Fo+IcwSvsB2UiuymJPKSe8g0TwuRasLhE= -github.com/whosonfirst/go-whosonfirst-feature v0.0.25/go.mod h1:JTauqAjTuZDfOTHxv5hFUzknF0NGgxvHTT5maTej9Co= -github.com/whosonfirst/go-whosonfirst-flags v0.4.4 h1:pwUnE8btx4Po6N5+uq0IrKVF0WraY5YW3tc3saIDg3A= -github.com/whosonfirst/go-whosonfirst-flags v0.4.4/go.mod h1:C9X7vQvNR/u55AfOHYUT8o7dRACWV++ZV4B0kylnIaY= +github.com/whosonfirst/go-whosonfirst-feature v0.0.27 h1:8RoiadvQEo8RFq8HFezq/Mwm/7UXR+dNJpE9oP8kvfQ= +github.com/whosonfirst/go-whosonfirst-feature v0.0.27/go.mod h1:vAtQysyMODE/ynMxSrHJ2eCBJRNFj9xUszrURnOy9Xc= +github.com/whosonfirst/go-whosonfirst-flags v0.5.1 h1:kRzXK7WZlEK1hNw+CECEdnWNtEDbWbjWdEg0imu1mGE= 
+github.com/whosonfirst/go-whosonfirst-flags v0.5.1/go.mod h1:VgXcWNtsCZGy/Xnt9bpSUTKJ3nYeqXqvLD3NrE6kzZg= github.com/whosonfirst/go-whosonfirst-names v0.1.0 h1:uXop/DwQqH60uDBZvHCPg1yRSQLScbm6VZyqcaED2KE= github.com/whosonfirst/go-whosonfirst-names v0.1.0/go.mod h1:0z86/nedM9T/5C8cAdbCMfRuBrkc33oEQ6vdJ6WybSg= -github.com/whosonfirst/go-whosonfirst-placetypes v0.3.0/go.mod h1:ez0VFkGFbgT2/z2oi3PIuW6FewsZ2+5glyfDD79XEHk= github.com/whosonfirst/go-whosonfirst-sources v0.1.0 h1:JuKLa6KWke22jBfJ1pM9WQHoz1/3pbDv2C+aR+THPPQ= github.com/whosonfirst/go-whosonfirst-sources v0.1.0/go.mod h1:EUMHyGzUmqPPxlMmOp+28BFeoBdxxE0HCKRd67lkqGM= -github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.3 h1:dc7BdkGrSKtGyIEO8jW4h6xS0DMdKANwMHFRc1+WcnM= -github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.3/go.mod h1:3Sym68fXOfIAcdJiRkJC+8FO+JuSGZ2NuDpRFzWMDi0= -github.com/whosonfirst/go-whosonfirst-uri v1.2.0 h1:lhmRsIhcpTr5HAo+kXKRGsSt76HTh3Ko/oTR2jpCm/o= -github.com/whosonfirst/go-whosonfirst-uri v1.2.0/go.mod h1:CuVygTCUpMG945MMvqHyqxvc/L5YkDaMrrVpRFr7ZxY= -github.com/whosonfirst/warning v0.1.1/go.mod h1:/unEMzhB9YaMeEwTJpzLN3kM5LiSxdJhKEsf/OQhn6s= +github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.7 h1:1j4IMuVua4/NhqKm2ke16h2V5Z2XiLoOdXbjDrfZb0E= +github.com/whosonfirst/go-whosonfirst-spr/v2 v2.3.7/go.mod h1:I2IqsIutlDrvYp37Xzwk4WGq+m/gLh+gEifiLmH0bac= +github.com/whosonfirst/go-whosonfirst-uri v1.3.0 h1:LYOVLqP9rWQxauYVkdw65j5LZxEi8OK0GHh/qCEpX4g= +github.com/whosonfirst/go-whosonfirst-uri v1.3.0/go.mod h1:CuVygTCUpMG945MMvqHyqxvc/L5YkDaMrrVpRFr7ZxY= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= +github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.mongodb.org/mongo-driver v1.11.4 h1:4ayjakA013OdpGyL2K3ZqylTac/rMjrJOMZ1EHizXas= +go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -65,16 +83,25 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab h1:2QkjZIsXupsJbJIdSjjUOgWK3aEtzyuh2mPt3l/CkeU= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s= +golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= @@ -87,6 +114,11 @@ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1N golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI= lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= modernc.org/cc/v3 v3.40.0 h1:P3g79IUS/93SYhtoeaHW+kRCIrYaxJ27MFPv+7kaTOw= @@ -95,19 +127,19 @@ modernc.org/ccgo/v3 v3.16.13 h1:Mkgdzl46i5F/CNR/Kj80Ri59hC8TKAhZrYSaqvkwzUw= modernc.org/ccgo/v3 v3.16.13/go.mod h1:2Quk+5YgpImhPjv2Qsob1DnZ/4som1lJTodubIcoUkY= modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk= modernc.org/httpfs v1.0.6 
h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM= -modernc.org/libc v1.21.5 h1:xBkU9fnHV+hvZuPSRszN0AXDG4M7nwPLwTWwkYcvLCI= -modernc.org/libc v1.21.5/go.mod h1:przBsL5RDOZajTVslkugzLBj1evTue36jEomFQOoYuI= -modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ= -modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/memory v1.4.0 h1:crykUfNSnMAXaOJnnxcSzbUGMqkLWjklJKkBK2nwZwk= -modernc.org/memory v1.4.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= +modernc.org/libc v1.29.0 h1:tTFRFq69YKCF2QyGNuRUQxKBm1uZZLubf6Cjh/pVHXs= +modernc.org/libc v1.29.0/go.mod h1:DaG/4Q3LRRdqpiLyP0C2m1B8ZMGkQ+cCgOIjEtQlYhQ= +modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4= +modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= +modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E= +modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E= modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4= modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/sqlite v1.20.0 h1:80zmD3BGkm8BZ5fUi/4lwJQHiO3GXgIUvZRXpoIfROY= -modernc.org/sqlite v1.20.0/go.mod h1:EsYz8rfOvLCiYTy5ZFsOYzoCcRMu98YYkwAcCw5YIYw= +modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ= +modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0= modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY= modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= -modernc.org/tcl v1.15.0 h1:oY+JeD11qVVSgVvodMJsu7Edf8tr5E/7tuhF5cNYz34= +modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY= modernc.org/token v1.0.1 h1:A3qvTqOwexpfZZeyI0FeGPDlSWX5pjZu9hF4lU+EKWg= modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/z v1.7.0 h1:xkDw/KepgEjeizO2sNco+hqYkU12taxQFqPEmgm1GWE= +modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY= diff --git a/tables/ancestors.go b/tables/ancestors.go index f2907fc..006eb98 100755 --- a/tables/ancestors.go +++ b/tables/ancestors.go @@ -3,13 +3,16 @@ package tables import ( "context" "fmt" + "strings" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" - "strings" ) +const ANCESTORS_TABLE_NAME string = "ancestors" + type AncestorsTable struct { features.FeatureTable name string @@ -42,7 +45,7 @@ func NewAncestorsTableWithDatabase(ctx context.Context, db sqlite.Database) (sql func NewAncestorsTable(ctx context.Context) (sqlite.Table, error) { t := AncestorsTable{ - name: "ancestors", + name: ANCESTORS_TABLE_NAME, } return &t, nil diff --git a/tables/concordances.go b/tables/concordances.go index 9cc2727..600ae96 100755 --- a/tables/concordances.go +++ b/tables/concordances.go @@ -3,12 +3,15 @@ package tables import ( "context" "fmt" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" ) +const CONCORDANCES_TABLE_NAME string = "concordances" + type ConcordancesTable struct { features.FeatureTable name string @@ -41,7 +44,7 @@ func NewConcordancesTableWithDatabase(ctx context.Context, db sqlite.Database) ( func NewConcordancesTable(ctx context.Context) 
(sqlite.Table, error) { t := ConcordancesTable{ - name: "concordances", + name: CONCORDANCES_TABLE_NAME, } return &t, nil diff --git a/tables/error.go b/tables/error.go index fb35e83..9bb8b8f 100644 --- a/tables/error.go +++ b/tables/error.go @@ -2,6 +2,7 @@ package tables import ( "fmt" + "github.com/aaronland/go-sqlite/v2" ) diff --git a/tables/geojson.go b/tables/geojson.go index a945efc..bfaf194 100755 --- a/tables/geojson.go +++ b/tables/geojson.go @@ -3,12 +3,15 @@ package tables import ( "context" "fmt" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" ) +const GEOJSON_TABLE_NAME string = "geojson" + type GeoJSONTableOptions struct { IndexAltFiles bool AllowMissingSourceGeom bool @@ -78,7 +81,7 @@ func NewGeoJSONTable(ctx context.Context) (sqlite.Table, error) { func NewGeoJSONTableWithOptions(ctx context.Context, opts *GeoJSONTableOptions) (sqlite.Table, error) { t := GeoJSONTable{ - name: "geojson", + name: GEOJSON_TABLE_NAME, options: opts, } diff --git a/tables/geometries.go b/tables/geometries.go index 1c23cda..86c53af 100644 --- a/tables/geometries.go +++ b/tables/geometries.go @@ -3,15 +3,18 @@ package tables import ( "context" "fmt" + _ "log" + "github.com/aaronland/go-sqlite/v2" "github.com/paulmach/orb/encoding/wkt" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/geometry" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" - _ "log" ) +const GEOMETRIES_TABLE_NAME string = "geometries" + type GeometriesTableOptions struct { IndexAltFiles bool } @@ -51,7 +54,7 @@ func NewGeometriesTable(ctx context.Context) (sqlite.Table, error) { func NewGeometriesTableWithOptions(ctx context.Context, opts *GeometriesTableOptions) (sqlite.Table, error) { t := GeometriesTable{ - name: "geometries", + name: GEOMETRIES_TABLE_NAME, options: opts, } diff --git a/tables/names.go b/tables/names.go index 7bb170d..ed04586 100755 --- a/tables/names.go +++ b/tables/names.go @@ -10,6 +10,8 @@ import ( "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" ) +const NAMES_TABLE_NAME string = "names" + type NamesTable struct { features.FeatureTable name string @@ -50,7 +52,7 @@ func NewNamesTableWithDatabase(ctx context.Context, db sqlite.Database) (sqlite. 
func NewNamesTable(ctx context.Context) (sqlite.Table, error) { t := NamesTable{ - name: "names", + name: NAMES_TABLE_NAME, } return &t, nil diff --git a/tables/properties.go b/tables/properties.go index c5133ac..c49a5a7 100755 --- a/tables/properties.go +++ b/tables/properties.go @@ -3,6 +3,7 @@ package tables import ( "context" "fmt" + "github.com/aaronland/go-sqlite/v2" "github.com/tidwall/gjson" "github.com/whosonfirst/go-whosonfirst-feature/alt" @@ -10,6 +11,8 @@ import ( "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" ) +const PROPERTIES_TABLE_NAME string = "properties" + type PropertiesTableOptions struct { IndexAltFiles bool } @@ -77,7 +80,7 @@ func NewPropertiesTable(ctx context.Context) (sqlite.Table, error) { func NewPropertiesTableWithOptions(ctx context.Context, opts *PropertiesTableOptions) (sqlite.Table, error) { t := PropertiesTable{ - name: "properties", + name: PROPERTIES_TABLE_NAME, options: opts, } diff --git a/tables/rtree.go b/tables/rtree.go index 029dd65..ef143ae 100755 --- a/tables/rtree.go +++ b/tables/rtree.go @@ -5,6 +5,7 @@ package tables import ( "context" "fmt" + "github.com/aaronland/go-sqlite/v2" "github.com/paulmach/orb" "github.com/paulmach/orb/encoding/wkt" @@ -12,9 +13,10 @@ import ( "github.com/whosonfirst/go-whosonfirst-feature/geometry" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" - _ "log" ) +const RTREE_TABLE_NAME string = "rtree" + type RTreeTableOptions struct { IndexAltFiles bool } @@ -48,7 +50,7 @@ func NewRTreeTable(ctx context.Context) (sqlite.Table, error) { func NewRTreeTableWithOptions(ctx context.Context, opts *RTreeTableOptions) (sqlite.Table, error) { t := RTreeTable{ - name: "rtree", + name: RTREE_TABLE_NAME, options: opts, } diff --git a/tables/search.go b/tables/search.go index efb5a68..7ee33c1 100755 --- a/tables/search.go +++ b/tables/search.go @@ -3,15 +3,18 @@ package tables import ( "context" "fmt" + _ "log" + "strings" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-names/tags" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" - _ "log" - "strings" ) +const SEARCH_TABLE_NAME string = "search" + type SearchTable struct { features.FeatureTable name string @@ -37,7 +40,7 @@ func NewSearchTableWithDatabase(ctx context.Context, db sqlite.Database) (sqlite func NewSearchTable(ctx context.Context) (sqlite.Table, error) { t := SearchTable{ - name: "search", + name: SEARCH_TABLE_NAME, } return &t, nil diff --git a/tables/spr.go b/tables/spr.go index 052f8f4..d74a619 100755 --- a/tables/spr.go +++ b/tables/spr.go @@ -3,16 +3,19 @@ package tables import ( "context" "fmt" + _ "log" + "strconv" + "strings" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-spr/v2" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" - _ "log" - "strconv" - "strings" ) +const SPR_TABLE_NAME string = "spr" + type SPRTableOptions struct { IndexAltFiles bool } @@ -46,7 +49,7 @@ func NewSPRTable(ctx context.Context) (sqlite.Table, error) { func NewSPRTableWithOptions(ctx context.Context, opts *SPRTableOptions) (sqlite.Table, error) { t := SPRTable{ - name: "spr", + name: SPR_TABLE_NAME, options: opts, } diff --git a/tables/supersedes.go b/tables/supersedes.go index f7784c0..ed632d9 100644 --- 
a/tables/supersedes.go +++ b/tables/supersedes.go @@ -3,12 +3,15 @@ package tables import ( "context" "fmt" + "github.com/aaronland/go-sqlite/v2" "github.com/whosonfirst/go-whosonfirst-feature/alt" "github.com/whosonfirst/go-whosonfirst-feature/properties" "github.com/whosonfirst/go-whosonfirst-sqlite-features/v2" ) +const SUPERSEDES_TABLE_NAME string = "supersedes" + type SupersedesTable struct { features.FeatureTable name string @@ -34,7 +37,7 @@ func NewSupersedesTableWithDatabase(ctx context.Context, db sqlite.Database) (sq func NewSupersedesTable(ctx context.Context) (sqlite.Table, error) { t := SupersedesTable{ - name: "supersedes", + name: SUPERSEDES_TABLE_NAME, } return &t, nil diff --git a/vendor/github.com/aaronland/go-sqlite-modernc/database.go b/vendor/github.com/aaronland/go-sqlite-modernc/database.go index fdee94b..b3a7cdb 100644 --- a/vendor/github.com/aaronland/go-sqlite-modernc/database.go +++ b/vendor/github.com/aaronland/go-sqlite-modernc/database.go @@ -4,17 +4,61 @@ import ( "context" "database/sql" "fmt" + "sync" + "github.com/aaronland/go-sqlite/v2" - "github.com/aaronland/go-sqlite/v2/database" + "github.com/aaronland/go-sqlite/v2/database" _ "modernc.org/sqlite" ) const SQLITE_SCHEME string = "modernc" const SQLITE_DRIVER string = "sqlite" +// In principle this could also be done with a sync.OnceFunc call but that will +// require that everyone uses Go 1.21 (whose package import changes broke everything) +// which is literally days old as I write this. So maybe a few releases after 1.21. + +var register_mu = new(sync.RWMutex) +var register_map = map[string]bool{} + func init() { + ctx := context.Background() - sqlite.RegisterDatabase(ctx, SQLITE_SCHEME, NewModerncDatabase) + err := RegisterSQLiteSchemes(ctx) + + if err != nil { + panic(err) + } +} + +// RegisterSQLiteSchemes will explicitly register all the schemes associated with the `client.Client` interface. +func RegisterSQLiteSchemes(ctx context.Context) error { + + roster := map[string]sqlite.DatabaseInitializationFunc{ + SQLITE_SCHEME: NewModerncDatabase, + } + + register_mu.Lock() + defer register_mu.Unlock() + + for scheme, fn := range roster { + + _, exists := register_map[scheme] + + if exists { + continue + } + + err := sqlite.RegisterDatabase(ctx, scheme, fn) + + if err != nil { + return fmt.Errorf("Failed to register database for '%s', %w", scheme, err) + } + + register_map[scheme] = true + } + + return nil } func NewModerncDatabase(ctx context.Context, db_uri string) (sqlite.Database, error) { diff --git a/vendor/github.com/dustin/go-humanize/LICENSE b/vendor/github.com/dustin/go-humanize/LICENSE new file mode 100644 index 0000000..8d9a94a --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2005-2008 Dustin Sallings + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + diff --git a/vendor/github.com/dustin/go-humanize/README.markdown b/vendor/github.com/dustin/go-humanize/README.markdown new file mode 100644 index 0000000..7d0b16b --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/README.markdown @@ -0,0 +1,124 @@ +# Humane Units [![Build Status](https://travis-ci.org/dustin/go-humanize.svg?branch=master)](https://travis-ci.org/dustin/go-humanize) [![GoDoc](https://godoc.org/github.com/dustin/go-humanize?status.svg)](https://godoc.org/github.com/dustin/go-humanize) + +Just a few functions for helping humanize times and sizes. + +`go get` it as `github.com/dustin/go-humanize`, import it as +`"github.com/dustin/go-humanize"`, use it as `humanize`. + +See [godoc](https://pkg.go.dev/github.com/dustin/go-humanize) for +complete documentation. + +## Sizes + +This lets you take numbers like `82854982` and convert them to useful +strings like, `83 MB` or `79 MiB` (whichever you prefer). + +Example: + +```go +fmt.Printf("That file is %s.", humanize.Bytes(82854982)) // That file is 83 MB. +``` + +## Times + +This lets you take a `time.Time` and spit it out in relative terms. +For example, `12 seconds ago` or `3 days from now`. + +Example: + +```go +fmt.Printf("This was touched %s.", humanize.Time(someTimeInstance)) // This was touched 7 hours ago. +``` + +Thanks to Kyle Lemons for the time implementation from an IRC +conversation one day. It's pretty neat. + +## Ordinals + +From a [mailing list discussion][odisc] where a user wanted to be able +to label ordinals. + + 0 -> 0th + 1 -> 1st + 2 -> 2nd + 3 -> 3rd + 4 -> 4th + [...] + +Example: + +```go +fmt.Printf("You're my %s best friend.", humanize.Ordinal(193)) // You are my 193rd best friend. +``` + +## Commas + +Want to shove commas into numbers? Be my guest. + + 0 -> 0 + 100 -> 100 + 1000 -> 1,000 + 1000000000 -> 1,000,000,000 + -100000 -> -100,000 + +Example: + +```go +fmt.Printf("You owe $%s.\n", humanize.Comma(6582491)) // You owe $6,582,491. +``` + +## Ftoa + +Nicer float64 formatter that removes trailing zeros. + +```go +fmt.Printf("%f", 2.24) // 2.240000 +fmt.Printf("%s", humanize.Ftoa(2.24)) // 2.24 +fmt.Printf("%f", 2.0) // 2.000000 +fmt.Printf("%s", humanize.Ftoa(2.0)) // 2 +``` + +## SI notation + +Format numbers with [SI notation][sinotation]. + +Example: + +```go +humanize.SI(0.00000000223, "M") // 2.23 nM +``` + +## English-specific functions + +The following functions are in the `humanize/english` subpackage. 
+ +### Plurals + +Simple English pluralization + +```go +english.PluralWord(1, "object", "") // object +english.PluralWord(42, "object", "") // objects +english.PluralWord(2, "bus", "") // buses +english.PluralWord(99, "locus", "loci") // loci + +english.Plural(1, "object", "") // 1 object +english.Plural(42, "object", "") // 42 objects +english.Plural(2, "bus", "") // 2 buses +english.Plural(99, "locus", "loci") // 99 loci +``` + +### Word series + +Format comma-separated words lists with conjuctions: + +```go +english.WordSeries([]string{"foo"}, "and") // foo +english.WordSeries([]string{"foo", "bar"}, "and") // foo and bar +english.WordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar and baz + +english.OxfordWordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar, and baz +``` + +[odisc]: https://groups.google.com/d/topic/golang-nuts/l8NhI74jl-4/discussion +[sinotation]: http://en.wikipedia.org/wiki/Metric_prefix diff --git a/vendor/github.com/dustin/go-humanize/big.go b/vendor/github.com/dustin/go-humanize/big.go new file mode 100644 index 0000000..f49dc33 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/big.go @@ -0,0 +1,31 @@ +package humanize + +import ( + "math/big" +) + +// order of magnitude (to a max order) +func oomm(n, b *big.Int, maxmag int) (float64, int) { + mag := 0 + m := &big.Int{} + for n.Cmp(b) >= 0 { + n.DivMod(n, b, m) + mag++ + if mag == maxmag && maxmag >= 0 { + break + } + } + return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag +} + +// total order of magnitude +// (same as above, but with no upper limit) +func oom(n, b *big.Int) (float64, int) { + mag := 0 + m := &big.Int{} + for n.Cmp(b) >= 0 { + n.DivMod(n, b, m) + mag++ + } + return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag +} diff --git a/vendor/github.com/dustin/go-humanize/bigbytes.go b/vendor/github.com/dustin/go-humanize/bigbytes.go new file mode 100644 index 0000000..3b015fd --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/bigbytes.go @@ -0,0 +1,189 @@ +package humanize + +import ( + "fmt" + "math/big" + "strings" + "unicode" +) + +var ( + bigIECExp = big.NewInt(1024) + + // BigByte is one byte in bit.Ints + BigByte = big.NewInt(1) + // BigKiByte is 1,024 bytes in bit.Ints + BigKiByte = (&big.Int{}).Mul(BigByte, bigIECExp) + // BigMiByte is 1,024 k bytes in bit.Ints + BigMiByte = (&big.Int{}).Mul(BigKiByte, bigIECExp) + // BigGiByte is 1,024 m bytes in bit.Ints + BigGiByte = (&big.Int{}).Mul(BigMiByte, bigIECExp) + // BigTiByte is 1,024 g bytes in bit.Ints + BigTiByte = (&big.Int{}).Mul(BigGiByte, bigIECExp) + // BigPiByte is 1,024 t bytes in bit.Ints + BigPiByte = (&big.Int{}).Mul(BigTiByte, bigIECExp) + // BigEiByte is 1,024 p bytes in bit.Ints + BigEiByte = (&big.Int{}).Mul(BigPiByte, bigIECExp) + // BigZiByte is 1,024 e bytes in bit.Ints + BigZiByte = (&big.Int{}).Mul(BigEiByte, bigIECExp) + // BigYiByte is 1,024 z bytes in bit.Ints + BigYiByte = (&big.Int{}).Mul(BigZiByte, bigIECExp) + // BigRiByte is 1,024 y bytes in bit.Ints + BigRiByte = (&big.Int{}).Mul(BigYiByte, bigIECExp) + // BigQiByte is 1,024 r bytes in bit.Ints + BigQiByte = (&big.Int{}).Mul(BigRiByte, bigIECExp) +) + +var ( + bigSIExp = big.NewInt(1000) + + // BigSIByte is one SI byte in big.Ints + BigSIByte = big.NewInt(1) + // BigKByte is 1,000 SI bytes in big.Ints + BigKByte = (&big.Int{}).Mul(BigSIByte, bigSIExp) + // BigMByte is 1,000 SI k bytes in big.Ints + BigMByte = (&big.Int{}).Mul(BigKByte, bigSIExp) + // BigGByte is 1,000 SI m bytes in big.Ints + 
BigGByte = (&big.Int{}).Mul(BigMByte, bigSIExp) + // BigTByte is 1,000 SI g bytes in big.Ints + BigTByte = (&big.Int{}).Mul(BigGByte, bigSIExp) + // BigPByte is 1,000 SI t bytes in big.Ints + BigPByte = (&big.Int{}).Mul(BigTByte, bigSIExp) + // BigEByte is 1,000 SI p bytes in big.Ints + BigEByte = (&big.Int{}).Mul(BigPByte, bigSIExp) + // BigZByte is 1,000 SI e bytes in big.Ints + BigZByte = (&big.Int{}).Mul(BigEByte, bigSIExp) + // BigYByte is 1,000 SI z bytes in big.Ints + BigYByte = (&big.Int{}).Mul(BigZByte, bigSIExp) + // BigRByte is 1,000 SI y bytes in big.Ints + BigRByte = (&big.Int{}).Mul(BigYByte, bigSIExp) + // BigQByte is 1,000 SI r bytes in big.Ints + BigQByte = (&big.Int{}).Mul(BigRByte, bigSIExp) +) + +var bigBytesSizeTable = map[string]*big.Int{ + "b": BigByte, + "kib": BigKiByte, + "kb": BigKByte, + "mib": BigMiByte, + "mb": BigMByte, + "gib": BigGiByte, + "gb": BigGByte, + "tib": BigTiByte, + "tb": BigTByte, + "pib": BigPiByte, + "pb": BigPByte, + "eib": BigEiByte, + "eb": BigEByte, + "zib": BigZiByte, + "zb": BigZByte, + "yib": BigYiByte, + "yb": BigYByte, + "rib": BigRiByte, + "rb": BigRByte, + "qib": BigQiByte, + "qb": BigQByte, + // Without suffix + "": BigByte, + "ki": BigKiByte, + "k": BigKByte, + "mi": BigMiByte, + "m": BigMByte, + "gi": BigGiByte, + "g": BigGByte, + "ti": BigTiByte, + "t": BigTByte, + "pi": BigPiByte, + "p": BigPByte, + "ei": BigEiByte, + "e": BigEByte, + "z": BigZByte, + "zi": BigZiByte, + "y": BigYByte, + "yi": BigYiByte, + "r": BigRByte, + "ri": BigRiByte, + "q": BigQByte, + "qi": BigQiByte, +} + +var ten = big.NewInt(10) + +func humanateBigBytes(s, base *big.Int, sizes []string) string { + if s.Cmp(ten) < 0 { + return fmt.Sprintf("%d B", s) + } + c := (&big.Int{}).Set(s) + val, mag := oomm(c, base, len(sizes)-1) + suffix := sizes[mag] + f := "%.0f %s" + if val < 10 { + f = "%.1f %s" + } + + return fmt.Sprintf(f, val, suffix) + +} + +// BigBytes produces a human readable representation of an SI size. +// +// See also: ParseBigBytes. +// +// BigBytes(82854982) -> 83 MB +func BigBytes(s *big.Int) string { + sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB", "RB", "QB"} + return humanateBigBytes(s, bigSIExp, sizes) +} + +// BigIBytes produces a human readable representation of an IEC size. +// +// See also: ParseBigBytes. +// +// BigIBytes(82854982) -> 79 MiB +func BigIBytes(s *big.Int) string { + sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "RiB", "QiB"} + return humanateBigBytes(s, bigIECExp, sizes) +} + +// ParseBigBytes parses a string representation of bytes into the number +// of bytes it represents. +// +// See also: BigBytes, BigIBytes. +// +// ParseBigBytes("42 MB") -> 42000000, nil +// ParseBigBytes("42 mib") -> 44040192, nil +func ParseBigBytes(s string) (*big.Int, error) { + lastDigit := 0 + hasComma := false + for _, r := range s { + if !(unicode.IsDigit(r) || r == '.' 
|| r == ',') { + break + } + if r == ',' { + hasComma = true + } + lastDigit++ + } + + num := s[:lastDigit] + if hasComma { + num = strings.Replace(num, ",", "", -1) + } + + val := &big.Rat{} + _, err := fmt.Sscanf(num, "%f", val) + if err != nil { + return nil, err + } + + extra := strings.ToLower(strings.TrimSpace(s[lastDigit:])) + if m, ok := bigBytesSizeTable[extra]; ok { + mv := (&big.Rat{}).SetInt(m) + val.Mul(val, mv) + rv := &big.Int{} + rv.Div(val.Num(), val.Denom()) + return rv, nil + } + + return nil, fmt.Errorf("unhandled size name: %v", extra) +} diff --git a/vendor/github.com/dustin/go-humanize/bytes.go b/vendor/github.com/dustin/go-humanize/bytes.go new file mode 100644 index 0000000..0b498f4 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/bytes.go @@ -0,0 +1,143 @@ +package humanize + +import ( + "fmt" + "math" + "strconv" + "strings" + "unicode" +) + +// IEC Sizes. +// kibis of bits +const ( + Byte = 1 << (iota * 10) + KiByte + MiByte + GiByte + TiByte + PiByte + EiByte +) + +// SI Sizes. +const ( + IByte = 1 + KByte = IByte * 1000 + MByte = KByte * 1000 + GByte = MByte * 1000 + TByte = GByte * 1000 + PByte = TByte * 1000 + EByte = PByte * 1000 +) + +var bytesSizeTable = map[string]uint64{ + "b": Byte, + "kib": KiByte, + "kb": KByte, + "mib": MiByte, + "mb": MByte, + "gib": GiByte, + "gb": GByte, + "tib": TiByte, + "tb": TByte, + "pib": PiByte, + "pb": PByte, + "eib": EiByte, + "eb": EByte, + // Without suffix + "": Byte, + "ki": KiByte, + "k": KByte, + "mi": MiByte, + "m": MByte, + "gi": GiByte, + "g": GByte, + "ti": TiByte, + "t": TByte, + "pi": PiByte, + "p": PByte, + "ei": EiByte, + "e": EByte, +} + +func logn(n, b float64) float64 { + return math.Log(n) / math.Log(b) +} + +func humanateBytes(s uint64, base float64, sizes []string) string { + if s < 10 { + return fmt.Sprintf("%d B", s) + } + e := math.Floor(logn(float64(s), base)) + suffix := sizes[int(e)] + val := math.Floor(float64(s)/math.Pow(base, e)*10+0.5) / 10 + f := "%.0f %s" + if val < 10 { + f = "%.1f %s" + } + + return fmt.Sprintf(f, val, suffix) +} + +// Bytes produces a human readable representation of an SI size. +// +// See also: ParseBytes. +// +// Bytes(82854982) -> 83 MB +func Bytes(s uint64) string { + sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"} + return humanateBytes(s, 1000, sizes) +} + +// IBytes produces a human readable representation of an IEC size. +// +// See also: ParseBytes. +// +// IBytes(82854982) -> 79 MiB +func IBytes(s uint64) string { + sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"} + return humanateBytes(s, 1024, sizes) +} + +// ParseBytes parses a string representation of bytes into the number +// of bytes it represents. +// +// See Also: Bytes, IBytes. +// +// ParseBytes("42 MB") -> 42000000, nil +// ParseBytes("42 mib") -> 44040192, nil +func ParseBytes(s string) (uint64, error) { + lastDigit := 0 + hasComma := false + for _, r := range s { + if !(unicode.IsDigit(r) || r == '.' 
|| r == ',') { + break + } + if r == ',' { + hasComma = true + } + lastDigit++ + } + + num := s[:lastDigit] + if hasComma { + num = strings.Replace(num, ",", "", -1) + } + + f, err := strconv.ParseFloat(num, 64) + if err != nil { + return 0, err + } + + extra := strings.ToLower(strings.TrimSpace(s[lastDigit:])) + if m, ok := bytesSizeTable[extra]; ok { + f *= float64(m) + if f >= math.MaxUint64 { + return 0, fmt.Errorf("too large: %v", s) + } + return uint64(f), nil + } + + return 0, fmt.Errorf("unhandled size name: %v", extra) +} diff --git a/vendor/github.com/dustin/go-humanize/comma.go b/vendor/github.com/dustin/go-humanize/comma.go new file mode 100644 index 0000000..520ae3e --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/comma.go @@ -0,0 +1,116 @@ +package humanize + +import ( + "bytes" + "math" + "math/big" + "strconv" + "strings" +) + +// Comma produces a string form of the given number in base 10 with +// commas after every three orders of magnitude. +// +// e.g. Comma(834142) -> 834,142 +func Comma(v int64) string { + sign := "" + + // Min int64 can't be negated to a usable value, so it has to be special cased. + if v == math.MinInt64 { + return "-9,223,372,036,854,775,808" + } + + if v < 0 { + sign = "-" + v = 0 - v + } + + parts := []string{"", "", "", "", "", "", ""} + j := len(parts) - 1 + + for v > 999 { + parts[j] = strconv.FormatInt(v%1000, 10) + switch len(parts[j]) { + case 2: + parts[j] = "0" + parts[j] + case 1: + parts[j] = "00" + parts[j] + } + v = v / 1000 + j-- + } + parts[j] = strconv.Itoa(int(v)) + return sign + strings.Join(parts[j:], ",") +} + +// Commaf produces a string form of the given number in base 10 with +// commas after every three orders of magnitude. +// +// e.g. Commaf(834142.32) -> 834,142.32 +func Commaf(v float64) string { + buf := &bytes.Buffer{} + if v < 0 { + buf.Write([]byte{'-'}) + v = 0 - v + } + + comma := []byte{','} + + parts := strings.Split(strconv.FormatFloat(v, 'f', -1, 64), ".") + pos := 0 + if len(parts[0])%3 != 0 { + pos += len(parts[0]) % 3 + buf.WriteString(parts[0][:pos]) + buf.Write(comma) + } + for ; pos < len(parts[0]); pos += 3 { + buf.WriteString(parts[0][pos : pos+3]) + buf.Write(comma) + } + buf.Truncate(buf.Len() - 1) + + if len(parts) > 1 { + buf.Write([]byte{'.'}) + buf.WriteString(parts[1]) + } + return buf.String() +} + +// CommafWithDigits works like the Commaf but limits the resulting +// string to the given number of decimal places. +// +// e.g. CommafWithDigits(834142.32, 1) -> 834,142.3 +func CommafWithDigits(f float64, decimals int) string { + return stripTrailingDigits(Commaf(f), decimals) +} + +// BigComma produces a string form of the given big.Int in base 10 +// with commas after every three orders of magnitude. 
+func BigComma(b *big.Int) string { + sign := "" + if b.Sign() < 0 { + sign = "-" + b.Abs(b) + } + + athousand := big.NewInt(1000) + c := (&big.Int{}).Set(b) + _, m := oom(c, athousand) + parts := make([]string, m+1) + j := len(parts) - 1 + + mod := &big.Int{} + for b.Cmp(athousand) >= 0 { + b.DivMod(b, athousand, mod) + parts[j] = strconv.FormatInt(mod.Int64(), 10) + switch len(parts[j]) { + case 2: + parts[j] = "0" + parts[j] + case 1: + parts[j] = "00" + parts[j] + } + j-- + } + parts[j] = strconv.Itoa(int(b.Int64())) + return sign + strings.Join(parts[j:], ",") +} diff --git a/vendor/github.com/dustin/go-humanize/commaf.go b/vendor/github.com/dustin/go-humanize/commaf.go new file mode 100644 index 0000000..2bc83a0 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/commaf.go @@ -0,0 +1,41 @@ +//go:build go1.6 +// +build go1.6 + +package humanize + +import ( + "bytes" + "math/big" + "strings" +) + +// BigCommaf produces a string form of the given big.Float in base 10 +// with commas after every three orders of magnitude. +func BigCommaf(v *big.Float) string { + buf := &bytes.Buffer{} + if v.Sign() < 0 { + buf.Write([]byte{'-'}) + v.Abs(v) + } + + comma := []byte{','} + + parts := strings.Split(v.Text('f', -1), ".") + pos := 0 + if len(parts[0])%3 != 0 { + pos += len(parts[0]) % 3 + buf.WriteString(parts[0][:pos]) + buf.Write(comma) + } + for ; pos < len(parts[0]); pos += 3 { + buf.WriteString(parts[0][pos : pos+3]) + buf.Write(comma) + } + buf.Truncate(buf.Len() - 1) + + if len(parts) > 1 { + buf.Write([]byte{'.'}) + buf.WriteString(parts[1]) + } + return buf.String() +} diff --git a/vendor/github.com/dustin/go-humanize/ftoa.go b/vendor/github.com/dustin/go-humanize/ftoa.go new file mode 100644 index 0000000..bce923f --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/ftoa.go @@ -0,0 +1,49 @@ +package humanize + +import ( + "strconv" + "strings" +) + +func stripTrailingZeros(s string) string { + if !strings.ContainsRune(s, '.') { + return s + } + offset := len(s) - 1 + for offset > 0 { + if s[offset] == '.' { + offset-- + break + } + if s[offset] != '0' { + break + } + offset-- + } + return s[:offset+1] +} + +func stripTrailingDigits(s string, digits int) string { + if i := strings.Index(s, "."); i >= 0 { + if digits <= 0 { + return s[:i] + } + i++ + if i+digits >= len(s) { + return s + } + return s[:i+digits] + } + return s +} + +// Ftoa converts a float to a string with no trailing zeros. +func Ftoa(num float64) string { + return stripTrailingZeros(strconv.FormatFloat(num, 'f', 6, 64)) +} + +// FtoaWithDigits converts a float to a string but limits the resulting string +// to the given number of decimal places, and no trailing zeros. +func FtoaWithDigits(num float64, digits int) string { + return stripTrailingZeros(stripTrailingDigits(strconv.FormatFloat(num, 'f', 6, 64), digits)) +} diff --git a/vendor/github.com/dustin/go-humanize/humanize.go b/vendor/github.com/dustin/go-humanize/humanize.go new file mode 100644 index 0000000..a2c2da3 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/humanize.go @@ -0,0 +1,8 @@ +/* +Package humanize converts boring ugly numbers to human-friendly strings and back. + +Durations can be turned into strings such as "3 days ago", numbers +representing sizes like 82854982 into useful strings like, "83 MB" or +"79 MiB" (whichever you prefer). 
+*/ +package humanize diff --git a/vendor/github.com/dustin/go-humanize/number.go b/vendor/github.com/dustin/go-humanize/number.go new file mode 100644 index 0000000..6470d0d --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/number.go @@ -0,0 +1,192 @@ +package humanize + +/* +Slightly adapted from the source to fit go-humanize. + +Author: https://github.com/gorhill +Source: https://gist.github.com/gorhill/5285193 + +*/ + +import ( + "math" + "strconv" +) + +var ( + renderFloatPrecisionMultipliers = [...]float64{ + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, + 100000000, + 1000000000, + } + + renderFloatPrecisionRounders = [...]float64{ + 0.5, + 0.05, + 0.005, + 0.0005, + 0.00005, + 0.000005, + 0.0000005, + 0.00000005, + 0.000000005, + 0.0000000005, + } +) + +// FormatFloat produces a formatted number as string based on the following user-specified criteria: +// * thousands separator +// * decimal separator +// * decimal precision +// +// Usage: s := RenderFloat(format, n) +// The format parameter tells how to render the number n. +// +// See examples: http://play.golang.org/p/LXc1Ddm1lJ +// +// Examples of format strings, given n = 12345.6789: +// "#,###.##" => "12,345.67" +// "#,###." => "12,345" +// "#,###" => "12345,678" +// "#\u202F###,##" => "12 345,68" +// "#.###,###### => 12.345,678900 +// "" (aka default format) => 12,345.67 +// +// The highest precision allowed is 9 digits after the decimal symbol. +// There is also a version for integer number, FormatInteger(), +// which is convenient for calls within template. +func FormatFloat(format string, n float64) string { + // Special cases: + // NaN = "NaN" + // +Inf = "+Infinity" + // -Inf = "-Infinity" + if math.IsNaN(n) { + return "NaN" + } + if n > math.MaxFloat64 { + return "Infinity" + } + if n < (0.0 - math.MaxFloat64) { + return "-Infinity" + } + + // default format + precision := 2 + decimalStr := "." 
+ thousandStr := "," + positiveStr := "" + negativeStr := "-" + + if len(format) > 0 { + format := []rune(format) + + // If there is an explicit format directive, + // then default values are these: + precision = 9 + thousandStr = "" + + // collect indices of meaningful formatting directives + formatIndx := []int{} + for i, char := range format { + if char != '#' && char != '0' { + formatIndx = append(formatIndx, i) + } + } + + if len(formatIndx) > 0 { + // Directive at index 0: + // Must be a '+' + // Raise an error if not the case + // index: 0123456789 + // +0.000,000 + // +000,000.0 + // +0000.00 + // +0000 + if formatIndx[0] == 0 { + if format[formatIndx[0]] != '+' { + panic("RenderFloat(): invalid positive sign directive") + } + positiveStr = "+" + formatIndx = formatIndx[1:] + } + + // Two directives: + // First is thousands separator + // Raise an error if not followed by 3-digit + // 0123456789 + // 0.000,000 + // 000,000.00 + if len(formatIndx) == 2 { + if (formatIndx[1] - formatIndx[0]) != 4 { + panic("RenderFloat(): thousands separator directive must be followed by 3 digit-specifiers") + } + thousandStr = string(format[formatIndx[0]]) + formatIndx = formatIndx[1:] + } + + // One directive: + // Directive is decimal separator + // The number of digit-specifier following the separator indicates wanted precision + // 0123456789 + // 0.00 + // 000,0000 + if len(formatIndx) == 1 { + decimalStr = string(format[formatIndx[0]]) + precision = len(format) - formatIndx[0] - 1 + } + } + } + + // generate sign part + var signStr string + if n >= 0.000000001 { + signStr = positiveStr + } else if n <= -0.000000001 { + signStr = negativeStr + n = -n + } else { + signStr = "" + n = 0.0 + } + + // split number into integer and fractional parts + intf, fracf := math.Modf(n + renderFloatPrecisionRounders[precision]) + + // generate integer part string + intStr := strconv.FormatInt(int64(intf), 10) + + // add thousand separator if required + if len(thousandStr) > 0 { + for i := len(intStr); i > 3; { + i -= 3 + intStr = intStr[:i] + thousandStr + intStr[i:] + } + } + + // no fractional part, we can leave now + if precision == 0 { + return signStr + intStr + } + + // generate fractional part + fracStr := strconv.Itoa(int(fracf * renderFloatPrecisionMultipliers[precision])) + // may need padding + if len(fracStr) < precision { + fracStr = "000000000000000"[:precision-len(fracStr)] + fracStr + } + + return signStr + intStr + decimalStr + fracStr +} + +// FormatInteger produces a formatted number as string. +// See FormatFloat. +func FormatInteger(format string, n int) string { + return FormatFloat(format, float64(n)) +} diff --git a/vendor/github.com/dustin/go-humanize/ordinals.go b/vendor/github.com/dustin/go-humanize/ordinals.go new file mode 100644 index 0000000..43d88a8 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/ordinals.go @@ -0,0 +1,25 @@ +package humanize + +import "strconv" + +// Ordinal gives you the input number in a rank/ordinal format. 
+// +// Ordinal(3) -> 3rd +func Ordinal(x int) string { + suffix := "th" + switch x % 10 { + case 1: + if x%100 != 11 { + suffix = "st" + } + case 2: + if x%100 != 12 { + suffix = "nd" + } + case 3: + if x%100 != 13 { + suffix = "rd" + } + } + return strconv.Itoa(x) + suffix +} diff --git a/vendor/github.com/dustin/go-humanize/si.go b/vendor/github.com/dustin/go-humanize/si.go new file mode 100644 index 0000000..8b85019 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/si.go @@ -0,0 +1,127 @@ +package humanize + +import ( + "errors" + "math" + "regexp" + "strconv" +) + +var siPrefixTable = map[float64]string{ + -30: "q", // quecto + -27: "r", // ronto + -24: "y", // yocto + -21: "z", // zepto + -18: "a", // atto + -15: "f", // femto + -12: "p", // pico + -9: "n", // nano + -6: "µ", // micro + -3: "m", // milli + 0: "", + 3: "k", // kilo + 6: "M", // mega + 9: "G", // giga + 12: "T", // tera + 15: "P", // peta + 18: "E", // exa + 21: "Z", // zetta + 24: "Y", // yotta + 27: "R", // ronna + 30: "Q", // quetta +} + +var revSIPrefixTable = revfmap(siPrefixTable) + +// revfmap reverses the map and precomputes the power multiplier +func revfmap(in map[float64]string) map[string]float64 { + rv := map[string]float64{} + for k, v := range in { + rv[v] = math.Pow(10, k) + } + return rv +} + +var riParseRegex *regexp.Regexp + +func init() { + ri := `^([\-0-9.]+)\s?([` + for _, v := range siPrefixTable { + ri += v + } + ri += `]?)(.*)` + + riParseRegex = regexp.MustCompile(ri) +} + +// ComputeSI finds the most appropriate SI prefix for the given number +// and returns the prefix along with the value adjusted to be within +// that prefix. +// +// See also: SI, ParseSI. +// +// e.g. ComputeSI(2.2345e-12) -> (2.2345, "p") +func ComputeSI(input float64) (float64, string) { + if input == 0 { + return 0, "" + } + mag := math.Abs(input) + exponent := math.Floor(logn(mag, 10)) + exponent = math.Floor(exponent/3) * 3 + + value := mag / math.Pow(10, exponent) + + // Handle special case where value is exactly 1000.0 + // Should return 1 M instead of 1000 k + if value == 1000.0 { + exponent += 3 + value = mag / math.Pow(10, exponent) + } + + value = math.Copysign(value, input) + + prefix := siPrefixTable[exponent] + return value, prefix +} + +// SI returns a string with default formatting. +// +// SI uses Ftoa to format float value, removing trailing zeros. +// +// See also: ComputeSI, ParseSI. +// +// e.g. SI(1000000, "B") -> 1 MB +// e.g. SI(2.2345e-12, "F") -> 2.2345 pF +func SI(input float64, unit string) string { + value, prefix := ComputeSI(input) + return Ftoa(value) + " " + prefix + unit +} + +// SIWithDigits works like SI but limits the resulting string to the +// given number of decimal places. +// +// e.g. SIWithDigits(1000000, 0, "B") -> 1 MB +// e.g. SIWithDigits(2.2345e-12, 2, "F") -> 2.23 pF +func SIWithDigits(input float64, decimals int, unit string) string { + value, prefix := ComputeSI(input) + return FtoaWithDigits(value, decimals) + " " + prefix + unit +} + +var errInvalid = errors.New("invalid input") + +// ParseSI parses an SI string back into the number and unit. +// +// See also: SI, ComputeSI. +// +// e.g. 
ParseSI("2.2345 pF") -> (2.2345e-12, "F", nil) +func ParseSI(input string) (float64, string, error) { + found := riParseRegex.FindStringSubmatch(input) + if len(found) != 4 { + return 0, "", errInvalid + } + mag := revSIPrefixTable[found[2]] + unit := found[3] + + base, err := strconv.ParseFloat(found[1], 64) + return base * mag, unit, err +} diff --git a/vendor/github.com/dustin/go-humanize/times.go b/vendor/github.com/dustin/go-humanize/times.go new file mode 100644 index 0000000..dd3fbf5 --- /dev/null +++ b/vendor/github.com/dustin/go-humanize/times.go @@ -0,0 +1,117 @@ +package humanize + +import ( + "fmt" + "math" + "sort" + "time" +) + +// Seconds-based time units +const ( + Day = 24 * time.Hour + Week = 7 * Day + Month = 30 * Day + Year = 12 * Month + LongTime = 37 * Year +) + +// Time formats a time into a relative string. +// +// Time(someT) -> "3 weeks ago" +func Time(then time.Time) string { + return RelTime(then, time.Now(), "ago", "from now") +} + +// A RelTimeMagnitude struct contains a relative time point at which +// the relative format of time will switch to a new format string. A +// slice of these in ascending order by their "D" field is passed to +// CustomRelTime to format durations. +// +// The Format field is a string that may contain a "%s" which will be +// replaced with the appropriate signed label (e.g. "ago" or "from +// now") and a "%d" that will be replaced by the quantity. +// +// The DivBy field is the amount of time the time difference must be +// divided by in order to display correctly. +// +// e.g. if D is 2*time.Minute and you want to display "%d minutes %s" +// DivBy should be time.Minute so whatever the duration is will be +// expressed in minutes. +type RelTimeMagnitude struct { + D time.Duration + Format string + DivBy time.Duration +} + +var defaultMagnitudes = []RelTimeMagnitude{ + {time.Second, "now", time.Second}, + {2 * time.Second, "1 second %s", 1}, + {time.Minute, "%d seconds %s", time.Second}, + {2 * time.Minute, "1 minute %s", 1}, + {time.Hour, "%d minutes %s", time.Minute}, + {2 * time.Hour, "1 hour %s", 1}, + {Day, "%d hours %s", time.Hour}, + {2 * Day, "1 day %s", 1}, + {Week, "%d days %s", Day}, + {2 * Week, "1 week %s", 1}, + {Month, "%d weeks %s", Week}, + {2 * Month, "1 month %s", 1}, + {Year, "%d months %s", Month}, + {18 * Month, "1 year %s", 1}, + {2 * Year, "2 years %s", 1}, + {LongTime, "%d years %s", Year}, + {math.MaxInt64, "a long while %s", 1}, +} + +// RelTime formats a time into a relative string. +// +// It takes two times and two labels. In addition to the generic time +// delta string (e.g. 5 minutes), the labels are used applied so that +// the label corresponding to the smaller time is applied. +// +// RelTime(timeInPast, timeInFuture, "earlier", "later") -> "3 weeks earlier" +func RelTime(a, b time.Time, albl, blbl string) string { + return CustomRelTime(a, b, albl, blbl, defaultMagnitudes) +} + +// CustomRelTime formats a time into a relative string. +// +// It takes two times two labels and a table of relative time formats. +// In addition to the generic time delta string (e.g. 5 minutes), the +// labels are used applied so that the label corresponding to the +// smaller time is applied. 
+func CustomRelTime(a, b time.Time, albl, blbl string, magnitudes []RelTimeMagnitude) string { + lbl := albl + diff := b.Sub(a) + + if a.After(b) { + lbl = blbl + diff = a.Sub(b) + } + + n := sort.Search(len(magnitudes), func(i int) bool { + return magnitudes[i].D > diff + }) + + if n >= len(magnitudes) { + n = len(magnitudes) - 1 + } + mag := magnitudes[n] + args := []interface{}{} + escaped := false + for _, ch := range mag.Format { + if escaped { + switch ch { + case 's': + args = append(args, lbl) + case 'd': + args = append(args, diff/mag.DivBy) + } + escaped = false + } else { + escaped = ch == '%' + } + } + return fmt.Sprintf(mag.Format, args...) +} diff --git a/vendor/github.com/paulmach/orb/CHANGELOG.md b/vendor/github.com/paulmach/orb/CHANGELOG.md index 5691ca1..1783940 100644 --- a/vendor/github.com/paulmach/orb/CHANGELOG.md +++ b/vendor/github.com/paulmach/orb/CHANGELOG.md @@ -2,6 +2,65 @@ All notable changes to this project will be documented in this file. +## [v0.11.1](https://github.com/paulmach/orb/compare/v0.11.0...v0.11.1) - 2024-01-29 + +### Fixed + +- geojson: `null` json into non-pointer Feature/FeatureCollection will set them to empty by [@paulmach](https://github.com/paulmach)in https://github.com/paulmach/orb/pull/145 + +## [v0.11.0](https://github.com/paulmach/orb/compare/v0.10.0...v0.11.0) - 2024-01-11 + +### Fixed + +- quadtree: InBoundMatching does not properly accept passed-in buffer by [@nirmal-vuppuluri](https://github.com/nirmal-vuppuluri) in https://github.com/paulmach/orb/pull/139 +- mvt: Do not swallow error cause by [@m-pavel](https://github.com/m-pavel) in https://github.com/paulmach/orb/pull/137 + +### Changed + +- simplify: Visvalingam, by default, keeps 3 points for "areas" by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/140 +- encoding/mvt: skip encoding of features will nil geometry by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/141 +- encoding/wkt: improve unmarshalling performance by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/142 + +## [v0.10.0](https://github.com/paulmach/orb/compare/v0.9.2...v0.10.0) - 2023-07-16 + +### Added + +- add ChildrenInZoomRange method to maptile.Tile by [@peitili](https://github.com/peitili) in https://github.com/paulmach/orb/pull/133 + +## [v0.9.2](https://github.com/paulmach/orb/compare/v0.9.1...v0.9.2) - 2023-05-04 + +### Fixed + +- encoding/wkt: better handling/validation of missing parens by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/131 + +## [v0.9.1](https://github.com/paulmach/orb/compare/v0.9.0...v0.9.1) - 2023-04-26 + +### Fixed + +- Bump up mongo driver to 1.11.4 by [@m-pavel](https://github.com/m-pavel) in https://github.com/paulmach/orb/pull/129 +- encoding/wkt: split strings with regexp by [@m-pavel](https://github.com/m-pavel) in https://github.com/paulmach/orb/pull/128 + +## [v0.9.0](https://github.com/paulmach/orb/compare/v0.8.0...v0.9.0) - 2023-02-19 + +### Added + +- geojson: marshal/unmarshal BSON [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/123 + +## [v0.8.0](https://github.com/paulmach/orb/compare/v0.7.1...v0.8.0) - 2023-01-05 + +### Fixed + +- quadtree: fix bad sort due to pointer allocation issue by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/115 +- geojson: ensure geometry unmarshal errors get returned by [@paulmach](https://github.com/paulmach) in 
https://github.com/paulmach/orb/pull/117 +- encoding/mvt: remove use of crypto/md5 to compare marshalling in tests by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/118 +- encoding/wkt: fix panic for some invalid wkt data by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/119 + +### Other + +- fix typo by [@rubenpoppe](https://github.com/rubenpoppe) in https://github.com/paulmach/orb/pull/107 +- Fixed a small twister in README.md by [@Timahawk](https://github.com/Timahawk) in https://github.com/paulmach/orb/pull/108 +- update github ci to use go 1.19 by [@paulmach](https://github.com/paulmach) in https://github.com/paulmach/orb/pull/116 + ## [v0.7.1](https://github.com/paulmach/orb/compare/v0.7.0...v0.7.1) - 2022-05-16 No changes diff --git a/vendor/github.com/paulmach/orb/README.md b/vendor/github.com/paulmach/orb/README.md index 8d0e515..03fe463 100644 --- a/vendor/github.com/paulmach/orb/README.md +++ b/vendor/github.com/paulmach/orb/README.md @@ -2,7 +2,7 @@ Package `orb` defines a set of types for working with 2d geo and planar/projected geometric data in Golang. There are a set of sub-packages that use these types to do interesting things. -They each provider their own README with extra info. +They each provide their own README with extra info. ## Interesting features @@ -111,6 +111,8 @@ The library supports third party "encoding/json" replacements such [github.com/json-iterator/go](https://github.com/json-iterator/go). See the [geojson](geojson) readme for more details. +The types also support BSON so they can be used directly when working with MongoDB. + ## Mapbox Vector Tiles The [encoding/mvt](encoding/mvt) sub-package implements Marshalling and @@ -138,10 +140,10 @@ layers.Simplify(simplify.DouglasPeucker(1.0)) layers.RemoveEmpty(1.0, 2.0) // encoding using the Mapbox Vector Tile protobuf encoding. -data, err := layers.Marshal() // this data is NOT gzipped. +data, err := mvt.Marshal(layers) // this data is NOT gzipped. // Sometimes MVT data is stored and transfered gzip compressed. In that case: -data, err := layers.MarshalGzipped() +data, err := mvt.MarshalGzipped(layers) ``` ## Decoding WKB/EWKB from a database query diff --git a/vendor/github.com/paulmach/orb/encoding/wkt/unmarshal.go b/vendor/github.com/paulmach/orb/encoding/wkt/unmarshal.go index de259fd..d9659d4 100644 --- a/vendor/github.com/paulmach/orb/encoding/wkt/unmarshal.go +++ b/vendor/github.com/paulmach/orb/encoding/wkt/unmarshal.go @@ -1,7 +1,9 @@ package wkt import ( + "bytes" "errors" + "regexp" "strconv" "strings" @@ -18,138 +20,391 @@ var ( // ErrUnsupportedGeometry is returned when geometry type is not supported by this lib. ErrUnsupportedGeometry = errors.New("wkt: unsupported geometry") + + doubleParen = regexp.MustCompile(`\)[\s|\t]*\)([\s|\t]*,[\s|\t]*)\([\s|\t]*\(`) + singleParen = regexp.MustCompile(`\)([\s|\t]*,[\s|\t]*)\(`) ) // UnmarshalPoint returns the point represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a point. 
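Before the function-by-function rewrite below, a usage sketch of the public surface of the new WKT parser, assuming the vendored orb/encoding/wkt API shown in this diff; the WKT strings and printed results are illustrative.

```go
package main

import (
	"fmt"

	"github.com/paulmach/orb/encoding/wkt"
)

func main() {
	// The generic entry point dispatches on the (case-insensitive) type prefix.
	g, err := wkt.Unmarshal("LINESTRING(0 0, 1 1, 2 2)")
	if err != nil {
		panic(err)
	}
	fmt.Println(g.GeoJSONType()) // LineString

	// Type-specific helpers validate the prefix before parsing.
	p, err := wkt.UnmarshalPoint("point(1 2)")
	fmt.Println(p, err) // [1 2] <nil>

	// A mismatched prefix is reported as ErrIncorrectGeometry.
	_, err = wkt.UnmarshalPoint("POLYGON((0 0,1 0,1 1,0 0))")
	fmt.Println(err == wkt.ErrIncorrectGeometry) // true
}
```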
-func UnmarshalPoint(s string) (p orb.Point, err error) { - geom, err := Unmarshal(s) +func UnmarshalPoint(s string) (orb.Point, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("POINT")) { + return orb.Point{}, ErrIncorrectGeometry + } + + return unmarshalPoint(s) +} + +func unmarshalPoint(s string) (orb.Point, error) { + s, err := trimSpaceBrackets(s[5:]) + if err != nil { + return orb.Point{}, err + } + + tp, err := parsePoint(s) if err != nil { return orb.Point{}, err } - g, ok := geom.(orb.Point) + + return tp, nil +} + +// parsePoint pase point by (x y) +func parsePoint(s string) (p orb.Point, err error) { + one, two, ok := cut(s, " ") if !ok { - return orb.Point{}, ErrIncorrectGeometry + return orb.Point{}, ErrNotWKT } - return g, nil + + x, err := strconv.ParseFloat(one, 64) + if err != nil { + return orb.Point{}, ErrNotWKT + } + + y, err := strconv.ParseFloat(two, 64) + if err != nil { + return orb.Point{}, ErrNotWKT + } + + return orb.Point{x, y}, nil } // UnmarshalMultiPoint returns the multi-point represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a multi-point. -func UnmarshalMultiPoint(s string) (p orb.MultiPoint, err error) { - geom, err := Unmarshal(s) +func UnmarshalMultiPoint(s string) (orb.MultiPoint, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("MULTIPOINT")) { + return nil, ErrIncorrectGeometry + } + + return unmarshalMultiPoint(s) +} + +func unmarshalMultiPoint(s string) (orb.MultiPoint, error) { + if strings.EqualFold(s, "MULTIPOINT EMPTY") { + return orb.MultiPoint{}, nil + } + + s, err := trimSpaceBrackets(s[10:]) if err != nil { return nil, err } - g, ok := geom.(orb.MultiPoint) - if !ok { - return nil, ErrIncorrectGeometry + count := strings.Count(s, ",") + mp := make(orb.MultiPoint, 0, count+1) + + err = splitOnComma(s, func(p string) error { + p, err := trimSpaceBrackets(p) + if err != nil { + return err + } + + tp, err := parsePoint(p) + if err != nil { + return err + } + + mp = append(mp, tp) + return nil + }) + if err != nil { + return nil, err } - return g, nil + + return mp, nil } // UnmarshalLineString returns the linestring represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a linestring. -func UnmarshalLineString(s string) (p orb.LineString, err error) { - geom, err := Unmarshal(s) +func UnmarshalLineString(s string) (orb.LineString, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("LINESTRING")) { + return nil, ErrIncorrectGeometry + } + + return unmarshalLineString(s) +} + +func unmarshalLineString(s string) (orb.LineString, error) { + if strings.EqualFold(s, "LINESTRING EMPTY") { + return orb.LineString{}, nil + } + + s, err := trimSpaceBrackets(s[10:]) if err != nil { return nil, err } - g, ok := geom.(orb.LineString) - if !ok { - return nil, ErrIncorrectGeometry + + count := strings.Count(s, ",") + ls := make(orb.LineString, 0, count+1) + + err = splitOnComma(s, func(p string) error { + tp, err := parsePoint(p) + if err != nil { + return err + } + + ls = append(ls, tp) + return nil + }) + if err != nil { + return nil, err } - return g, nil + + return ls, nil } // UnmarshalMultiLineString returns the multi-linestring represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a multi-linestring. 
-func UnmarshalMultiLineString(s string) (p orb.MultiLineString, err error) { - geom, err := Unmarshal(s) +func UnmarshalMultiLineString(s string) (orb.MultiLineString, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("MULTILINESTRING")) { + return nil, ErrIncorrectGeometry + } + + return unmarshalMultiLineString(s) +} + +func unmarshalMultiLineString(s string) (orb.MultiLineString, error) { + if strings.EqualFold(s, "MULTILINESTRING EMPTY") { + return orb.MultiLineString{}, nil + } + + s, err := trimSpaceBrackets(s[15:]) if err != nil { return nil, err } - g, ok := geom.(orb.MultiLineString) - if !ok { - return nil, ErrIncorrectGeometry + + var tmls orb.MultiLineString + err = splitByRegexpYield( + s, + singleParen, + func(i int) { + tmls = make(orb.MultiLineString, 0, i) + }, + func(ls string) error { + ls, err := trimSpaceBrackets(ls) + if err != nil { + return err + } + + count := strings.Count(ls, ",") + tls := make(orb.LineString, 0, count+1) + + err = splitOnComma(ls, func(p string) error { + tp, err := parsePoint(p) + if err != nil { + return err + } + + tls = append(tls, tp) + return nil + }) + if err != nil { + return err + } + + tmls = append(tmls, tls) + return nil + }, + ) + if err != nil { + return nil, err } - return g, nil + + return tmls, nil } // UnmarshalPolygon returns the polygon represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a polygon. -func UnmarshalPolygon(s string) (p orb.Polygon, err error) { - geom, err := Unmarshal(s) +func UnmarshalPolygon(s string) (orb.Polygon, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("POLYGON")) { + return nil, ErrIncorrectGeometry + } + + return unmarshalPolygon(s) +} + +func unmarshalPolygon(s string) (orb.Polygon, error) { + if strings.EqualFold(s, "POLYGON EMPTY") { + return orb.Polygon{}, nil + } + + s, err := trimSpaceBrackets(s[7:]) if err != nil { return nil, err } - g, ok := geom.(orb.Polygon) - if !ok { - return nil, ErrIncorrectGeometry + + var poly orb.Polygon + err = splitByRegexpYield( + s, + singleParen, + func(i int) { + poly = make(orb.Polygon, 0, i) + }, + func(r string) error { + r, err := trimSpaceBrackets(r) + if err != nil { + return err + } + + count := strings.Count(r, ",") + ring := make(orb.Ring, 0, count+1) + + err = splitOnComma(r, func(p string) error { + tp, err := parsePoint(p) + if err != nil { + return err + } + ring = append(ring, tp) + return nil + }) + if err != nil { + return err + } + + poly = append(poly, ring) + return nil + }, + ) + if err != nil { + return nil, err } - return g, nil + + return poly, nil } // UnmarshalMultiPolygon returns the multi-polygon represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a multi-polygon. 
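The explicit `EMPTY` handling and the prefix/bracket validation above change the failure modes compared with the old contains-based parser. A small sketch, again assuming the vendored API; the second call is missing a closing parenthesis on purpose.

```go
package main

import (
	"fmt"

	"github.com/paulmach/orb/encoding/wkt"
)

func main() {
	// Explicit EMPTY geometries decode to empty (non-nil) values.
	poly, err := wkt.UnmarshalPolygon("POLYGON EMPTY")
	fmt.Println(len(poly), err) // 0 <nil>

	// Unbalanced parentheses are surfaced as ErrNotWKT.
	_, err = wkt.Unmarshal("POLYGON((0 0,1 0,1 1,0 0)")
	fmt.Println(err == wkt.ErrNotWKT) // true
}
```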
-func UnmarshalMultiPolygon(s string) (p orb.MultiPolygon, err error) { - geom, err := Unmarshal(s) +func UnmarshalMultiPolygon(s string) (orb.MultiPolygon, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("MULTIPOLYGON")) { + return nil, ErrIncorrectGeometry + } + + return unmarshalMultiPolygon(s) +} + +func unmarshalMultiPolygon(s string) (orb.MultiPolygon, error) { + if strings.EqualFold(s, "MULTIPOLYGON EMPTY") { + return orb.MultiPolygon{}, nil + } + + s, err := trimSpaceBrackets(s[12:]) if err != nil { return nil, err } - g, ok := geom.(orb.MultiPolygon) - if !ok { - return nil, ErrIncorrectGeometry + + var mpoly orb.MultiPolygon + err = splitByRegexpYield( + s, + doubleParen, + func(i int) { + mpoly = make(orb.MultiPolygon, 0, i) + }, + func(poly string) error { + poly, err := trimSpaceBrackets(poly) + if err != nil { + return err + } + + var tpoly orb.Polygon + err = splitByRegexpYield( + poly, + singleParen, + func(i int) { + tpoly = make(orb.Polygon, 0, i) + }, + func(r string) error { + r, err := trimSpaceBrackets(r) + if err != nil { + return err + } + + count := strings.Count(r, ",") + tr := make(orb.Ring, 0, count+1) + + err = splitOnComma(r, func(s string) error { + tp, err := parsePoint(s) + if err != nil { + return err + } + + tr = append(tr, tp) + return nil + }) + if err != nil { + return err + } + + tpoly = append(tpoly, tr) + return nil + }, + ) + if err != nil { + return err + } + + mpoly = append(mpoly, tpoly) + return nil + }, + ) + if err != nil { + return nil, err } - return g, nil + + return mpoly, nil } // UnmarshalCollection returns the geometry collection represented by the wkt string. // Will return ErrIncorrectGeometry if the wkt is not a geometry collection. -func UnmarshalCollection(s string) (p orb.Collection, err error) { - geom, err := Unmarshal(s) - if err != nil { - return orb.Collection{}, err - } - g, ok := geom.(orb.Collection) - if !ok { +func UnmarshalCollection(s string) (orb.Collection, error) { + s = trimSpace(s) + prefix := upperPrefix(s) + if !bytes.HasPrefix(prefix, []byte("GEOMETRYCOLLECTION")) { return nil, ErrIncorrectGeometry } - return g, nil + + return unmarshalCollection(s) } -// trimSpaceBrackets trim space and brackets -func trimSpaceBrackets(s string) string { - s = strings.Trim(s, " ") - if s[0] == '(' { - s = s[1:] - } - if s[len(s)-1] == ')' { - s = s[:len(s)-1] +func unmarshalCollection(s string) (orb.Collection, error) { + if strings.EqualFold(s, "GEOMETRYCOLLECTION EMPTY") { + return orb.Collection{}, nil } - return strings.Trim(s, " ") -} - -// parsePoint pase point by (x y) -func parsePoint(s string) (p orb.Point, err error) { - ps := strings.Split(s, " ") - if len(ps) != 2 { - return orb.Point{}, ErrNotWKT + if len(s) == 18 { // just GEOMETRYCOLLECTION + return nil, ErrNotWKT } - x, err := strconv.ParseFloat(ps[0], 64) - if err != nil { - return orb.Point{}, err + geometries := splitGeometryCollection(s[18:]) + if len(geometries) == 0 { + return orb.Collection{}, nil } - y, err := strconv.ParseFloat(ps[1], 64) - if err != nil { - return orb.Point{}, err + c := make(orb.Collection, 0, len(geometries)) + for _, g := range geometries { + if len(g) == 0 { + continue + } + + tg, err := Unmarshal(g) + if err != nil { + return nil, err + } + + c = append(c, tg) } - return orb.Point{x, y}, nil + return c, nil } // splitGeometryCollection split GEOMETRYCOLLECTION to more geometry @@ -162,7 +417,7 @@ func splitGeometryCollection(s string) (r []string) { stack = append(stack, v) continue } 
- if v >= 'A' && v < 'Z' { + if ('A' <= v && v < 'Z') || ('a' <= v && v < 'z') { t := string(stack) r = append(r, t[:len(t)-1]) stack = make([]rune, 0) @@ -179,140 +434,177 @@ func splitGeometryCollection(s string) (r []string) { } // Unmarshal return a geometry by parsing the WKT string. -func Unmarshal(s string) (geom orb.Geometry, err error) { - s = strings.ToUpper(strings.Trim(s, " ")) - switch { - case strings.Contains(s, "GEOMETRYCOLLECTION"): - if s == "GEOMETRYCOLLECTION EMPTY" { - return orb.Collection{}, nil - } - s = strings.Replace(s, "GEOMETRYCOLLECTION", "", -1) - c := orb.Collection{} - ms := splitGeometryCollection(s) - if len(ms) == 0 { - return nil, err - } - for _, v := range ms { - if len(v) == 0 { - continue - } - g, err := Unmarshal(v) - if err != nil { - return nil, err - } - c = append(c, g) - } - geom = c +func Unmarshal(s string) (orb.Geometry, error) { + var ( + g orb.Geometry + err error + ) - case strings.Contains(s, "MULTIPOINT"): - if s == "MULTIPOINT EMPTY" { - return orb.MultiPoint{}, nil - } - s = strings.Replace(s, "MULTIPOINT", "", -1) - s = trimSpaceBrackets(s) - ps := strings.Split(s, ",") - mp := orb.MultiPoint{} - for _, p := range ps { - tp, err := parsePoint(trimSpaceBrackets(p)) - if err != nil { - return nil, err - } - mp = append(mp, tp) - } - geom = mp + s = trimSpace(s) + prefix := upperPrefix(s) + + if bytes.HasPrefix(prefix, []byte("POINT")) { + g, err = unmarshalPoint(s) + } else if bytes.HasPrefix(prefix, []byte("LINESTRING")) { + g, err = unmarshalLineString(s) + } else if bytes.HasPrefix(prefix, []byte("POLYGON")) { + g, err = unmarshalPolygon(s) + } else if bytes.HasPrefix(prefix, []byte("MULTIPOINT")) { + g, err = unmarshalMultiPoint(s) + } else if bytes.HasPrefix(prefix, []byte("MULTILINESTRING")) { + g, err = unmarshalMultiLineString(s) + } else if bytes.HasPrefix(prefix, []byte("MULTIPOLYGON")) { + g, err = unmarshalMultiPolygon(s) + } else if bytes.HasPrefix(prefix, []byte("GEOMETRYCOLLECTION")) { + g, err = unmarshalCollection(s) + } else { + return nil, ErrUnsupportedGeometry + } - case strings.Contains(s, "POINT"): - s = strings.Replace(s, "POINT", "", -1) - tp, err := parsePoint(trimSpaceBrackets(s)) + if err != nil { + return nil, err + } + + return g, nil +} + +// splitByRegexpYield splits the input by the regexp. The first callback can +// be used to initialize an array with the size of the result, the second +// is the callback with the matches. +// We use a yield function because it was faster/used less memory than +// allocating an array of the results. +func splitByRegexpYield(s string, re *regexp.Regexp, set func(int), yield func(string) error) error { + indexes := re.FindAllStringSubmatchIndex(s, -1) + set(len(indexes) + 1) + start := 0 + for _, element := range indexes { + err := yield(s[start:element[2]]) if err != nil { - return nil, err + return err } - geom = tp + start = element[3] + } - case strings.Contains(s, "MULTILINESTRING"): - if s == "MULTILINESTRING EMPTY" { - return orb.MultiLineString{}, nil - } - s = strings.Replace(s, "MULTILINESTRING", "", -1) - ml := orb.MultiLineString{} - for _, l := range strings.Split(trimSpaceBrackets(s), "),(") { - tl := orb.LineString{} - for _, p := range strings.Split(trimSpaceBrackets(l), ",") { - tp, err := parsePoint(trimSpaceBrackets(p)) - if err != nil { - return nil, err - } - tl = append(tl, tp) + return yield(s[start:]) +} + +// splitOnComma is optimized to split on the regex [\s|\t|\n]*,[\s|\t|\n]* +// i.e. comma with possible spaces on each side. e.g. 
' , ' +// We use a yield function because it was faster/used less memory than +// allocating an array of the results. +func splitOnComma(s string, yield func(s string) error) error { + // in WKT points are separtated by commas, coordinates in points are separted by spaces + // e.g. 1 2,3 4,5 6,7 81 2,5 4 + // we want to split this and find each point. + + // at is right after the previous space-comma-space match. + // once a space-comma-space match is found, we go from 'at' to the start + // of the match, that's the split that needs to be returned. + var at int + + var start int // the start of a space-comma-space section + + // a space starts a section, we need to see a comma for it to be a valid section + var sawSpace, sawComma bool + for i := 0; i < len(s); i++ { + if s[i] == ',' { + if !sawSpace { + sawSpace = true + start = i } - ml = append(ml, tl) + sawComma = true + continue } - geom = ml - case strings.Contains(s, "LINESTRING"): - if s == "LINESTRING EMPTY" { - return orb.LineString{}, nil + if v := s[i]; v == ' ' || v == '\t' || v == '\n' { + if !sawSpace { + sawSpace = true + start = i + } + continue } - s = strings.Replace(s, "LINESTRING", "", -1) - s = trimSpaceBrackets(s) - ps := strings.Split(s, ",") - ls := orb.LineString{} - for _, p := range ps { - tp, err := parsePoint(trimSpaceBrackets(p)) + + if sawComma { + err := yield(s[at:start]) if err != nil { - return nil, err + return err } - ls = append(ls, tp) + at = i } - geom = ls + sawSpace = false + sawComma = false + } - case strings.Contains(s, "MULTIPOLYGON"): - if s == "MULTIPOLYGON EMPTY" { - return orb.MultiPolygon{}, nil - } - s = strings.Replace(s, "MULTIPOLYGON", "", -1) - mpol := orb.MultiPolygon{} - for _, ps := range strings.Split(trimSpaceBrackets(s), ")),((") { - pol := orb.Polygon{} - for _, ls := range strings.Split(trimSpaceBrackets(ps), "),(") { - ring := orb.Ring{} - for _, p := range strings.Split(ls, ",") { - tp, err := parsePoint(trimSpaceBrackets(p)) - if err != nil { - return nil, err - } - ring = append(ring, tp) - } - pol = append(pol, ring) - } - mpol = append(mpol, pol) + return yield(s[at:]) +} + +// trimSpaceBrackets trim space and brackets +func trimSpaceBrackets(s string) (string, error) { + s = trimSpace(s) + if len(s) == 0 { + return s, nil + } + + if s[0] == '(' { + s = s[1:] + } else { + return "", ErrNotWKT + } + + if s[len(s)-1] == ')' { + s = s[:len(s)-1] + } else { + return "", ErrNotWKT + } + + return trimSpace(s), nil +} + +func trimSpace(s string) string { + if len(s) == 0 { + return s + } + + var start, end int + + for start = 0; start < len(s); start++ { + if v := s[start]; v != ' ' && v != '\t' && v != '\n' { + break } - geom = mpol + } - case strings.Contains(s, "POLYGON"): - if s == "POLYGON EMPTY" { - return orb.Polygon{}, nil + for end = len(s) - 1; end >= 0; end-- { + if v := s[end]; v != ' ' && v != '\t' && v != '\n' { + break } - s = strings.Replace(s, "POLYGON", "", -1) - s = trimSpaceBrackets(s) - - rs := strings.Split(s, "),(") - pol := make(orb.Polygon, 0, len(rs)) - for _, r := range rs { - ps := strings.Split(trimSpaceBrackets(r), ",") - ring := orb.Ring{} - for _, p := range ps { - tp, err := parsePoint(trimSpaceBrackets(p)) - if err != nil { - return nil, err - } - ring = append(ring, tp) - } - pol = append(pol, ring) + } + + if start >= end { + return "" + } + + return s[start : end+1] +} + +// gets the ToUpper case of the first 20 chars. 
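The yield-callback approach used by `splitByRegexpYield` and `splitOnComma` above is worth a standalone illustration. The following is a hypothetical, simplified sketch of the pattern only, not code from the library: the caller receives each piece as it is found, so no intermediate `[]string` is ever allocated.

```go
package wktsketch

import "strings"

// splitYield is a hypothetical, simplified illustration of the yield-callback
// pattern: each piece is handed to the caller as it is found, so no result
// slice needs to be built up and returned.
func splitYield(s, sep string, yield func(string) error) error {
	for {
		i := strings.Index(s, sep)
		if i < 0 {
			return yield(s) // final (or only) piece
		}
		if err := yield(s[:i]); err != nil {
			return err
		}
		s = s[i+len(sep):]
	}
}
```

The real functions add WKT-specific details (a pre-sizing `set` callback, tolerance for surrounding whitespace and tabs), but the allocation-avoidance idea is the same.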
+// This is to determin the type without doing a full strings.ToUpper +func upperPrefix(s string) []byte { + prefix := make([]byte, 20) + for i := 0; i < 20 && i < len(s); i++ { + if 'a' <= s[i] && s[i] <= 'z' { + prefix[i] = s[i] - ('a' - 'A') + } else { + prefix[i] = s[i] } - geom = pol - default: - return nil, ErrUnsupportedGeometry } - return + return prefix +} + +// coppied here from strings.Cut so we don't require go1.18 +func cut(s, sep string) (before, after string, found bool) { + if i := strings.Index(s, sep); i >= 0 { + return s[:i], s[i+len(sep):], true + } + return s, "", false } diff --git a/vendor/github.com/paulmach/orb/geojson/README.md b/vendor/github.com/paulmach/orb/geojson/README.md index 2a5c8c7..07ca932 100644 --- a/vendor/github.com/paulmach/orb/geojson/README.md +++ b/vendor/github.com/paulmach/orb/geojson/README.md @@ -2,10 +2,15 @@ This package **encodes and decodes** [GeoJSON](http://geojson.org/) into Go structs using the geometries in the [orb](https://github.com/paulmach/orb) package. -Supports both the [json.Marshaler](http://golang.org/pkg/encoding/json/#Marshaler) and -[json.Unmarshaler](http://golang.org/pkg/encoding/json/#Unmarshaler) interfaces. + +Supports both the [json.Marshaler](https://pkg.go.dev/encoding/json#Marshaler) and +[json.Unmarshaler](https://pkg.go.dev/encoding/json#Unmarshaler) interfaces. The package also provides helper functions such as `UnmarshalFeatureCollection` and `UnmarshalFeature`. +The types also support BSON via the [bson.Marshaler](https://pkg.go.dev/go.mongodb.org/mongo-driver/bson#Marshaler) and +[bson.Unmarshaler](https://pkg.go.dev/go.mongodb.org/mongo-driver/bson#Unmarshaler) interfaces. +These types can be used directly when working with MongoDB. + ## Unmarshalling (JSON -> Go) ```go diff --git a/vendor/github.com/paulmach/orb/geojson/feature.go b/vendor/github.com/paulmach/orb/geojson/feature.go index 7230678..0dd246d 100644 --- a/vendor/github.com/paulmach/orb/geojson/feature.go +++ b/vendor/github.com/paulmach/orb/geojson/feature.go @@ -1,9 +1,11 @@ package geojson import ( + "bytes" "fmt" "github.com/paulmach/orb" + "go.mongodb.org/mongo-driver/bson" ) // A Feature corresponds to GeoJSON feature object @@ -37,7 +39,18 @@ var _ orb.Pointer = &Feature{} // It will handle the encoding of all the child geometries. // Alternately one can call json.Marshal(f) directly for the same result. func (f Feature) MarshalJSON() ([]byte, error) { - jf := &jsonFeature{ + return marshalJSON(newFeatureDoc(&f)) +} + +// MarshalBSON converts the feature object into the proper JSON. +// It will handle the encoding of all the child geometries. +// Alternately one can call json.Marshal(f) directly for the same result. +func (f Feature) MarshalBSON() ([]byte, error) { + return bson.Marshal(newFeatureDoc(&f)) +} + +func newFeatureDoc(f *Feature) *featureDoc { + doc := &featureDoc{ ID: f.ID, Type: "Feature", Properties: f.Properties, @@ -45,11 +58,11 @@ func (f Feature) MarshalJSON() ([]byte, error) { Geometry: NewGeometry(f.Geometry), } - if len(jf.Properties) == 0 { - jf.Properties = nil + if len(doc.Properties) == 0 { + doc.Properties = nil } - return marshalJSON(jf) + return doc } // UnmarshalFeature decodes the data into a GeoJSON feature. @@ -67,39 +80,59 @@ func UnmarshalFeature(data []byte) (*Feature, error) { // UnmarshalJSON handles the correct unmarshalling of the data // into the orb.Geometry types. 
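With `MarshalBSON`/`UnmarshalBSON` in place, a Feature can round-trip through the mongo-driver codec much like it does through encoding/json. A minimal sketch, assuming the vendored geojson and mongo-driver APIs in this diff; the coordinates and property values are illustrative.

```go
package main

import (
	"fmt"

	"github.com/paulmach/orb"
	"github.com/paulmach/orb/geojson"
	"go.mongodb.org/mongo-driver/bson"
)

func main() {
	f := geojson.NewFeature(orb.Point{-122.39, 37.62})
	f.Properties["name"] = "SFO"

	// Encode the feature as a BSON document (for example before inserting
	// it into a MongoDB collection) and decode it back.
	raw, err := bson.Marshal(f)
	if err != nil {
		panic(err)
	}

	var back geojson.Feature
	if err := bson.Unmarshal(raw, &back); err != nil {
		panic(err)
	}
	fmt.Println(back.Geometry, back.Properties["name"]) // [-122.39 37.62] SFO
}
```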
func (f *Feature) UnmarshalJSON(data []byte) error { - jf := &jsonFeature{} - err := unmarshalJSON(data, &jf) + if bytes.Equal(data, []byte(`null`)) { + *f = Feature{} + return nil + } + + doc := &featureDoc{} + err := unmarshalJSON(data, &doc) if err != nil { return err } - if jf.Type != "Feature" { - return fmt.Errorf("geojson: not a feature: type=%s", jf.Type) + return featureUnmarshalFinish(doc, f) +} + +// UnmarshalBSON will unmarshal a BSON document created with bson.Marshal. +func (f *Feature) UnmarshalBSON(data []byte) error { + doc := &featureDoc{} + err := bson.Unmarshal(data, &doc) + if err != nil { + return err + } + + return featureUnmarshalFinish(doc, f) +} + +func featureUnmarshalFinish(doc *featureDoc, f *Feature) error { + if doc.Type != "Feature" { + return fmt.Errorf("geojson: not a feature: type=%s", doc.Type) } var g orb.Geometry - if jf.Geometry != nil { - if jf.Geometry.Coordinates == nil && jf.Geometry.Geometries == nil { + if doc.Geometry != nil { + if doc.Geometry.Coordinates == nil && doc.Geometry.Geometries == nil { return ErrInvalidGeometry } - g = jf.Geometry.Geometry() + g = doc.Geometry.Geometry() } *f = Feature{ - ID: jf.ID, - Type: jf.Type, - Properties: jf.Properties, - BBox: jf.BBox, + ID: doc.ID, + Type: doc.Type, + Properties: doc.Properties, + BBox: doc.BBox, Geometry: g, } return nil } -type jsonFeature struct { - ID interface{} `json:"id,omitempty"` - Type string `json:"type"` - BBox BBox `json:"bbox,omitempty"` - Geometry *Geometry `json:"geometry"` - Properties Properties `json:"properties"` +type featureDoc struct { + ID interface{} `json:"id,omitempty" bson:"id"` + Type string `json:"type" bson:"type"` + BBox BBox `json:"bbox,omitempty" bson:"bbox,omitempty"` + Geometry *Geometry `json:"geometry" bson:"geometry"` + Properties Properties `json:"properties" bson:"properties"` } diff --git a/vendor/github.com/paulmach/orb/geojson/feature_collection.go b/vendor/github.com/paulmach/orb/geojson/feature_collection.go index 113c8ee..0235bc5 100644 --- a/vendor/github.com/paulmach/orb/geojson/feature_collection.go +++ b/vendor/github.com/paulmach/orb/geojson/feature_collection.go @@ -7,7 +7,10 @@ json.Unmarshaler interfaces as well as helper functions such as package geojson import ( + "bytes" "fmt" + + "go.mongodb.org/mongo-driver/bson" ) const featureCollection = "FeatureCollection" @@ -44,6 +47,21 @@ func (fc *FeatureCollection) Append(feature *Feature) *FeatureCollection { // Items in the ExtraMembers map will be included in the base of the // feature collection object. func (fc FeatureCollection) MarshalJSON() ([]byte, error) { + m := newFeatureCollectionDoc(fc) + return marshalJSON(m) +} + +// MarshalBSON converts the feature collection object into a BSON document +// represented by bytes. It will handle the encoding of all the child features +// and geometries. +// Items in the ExtraMembers map will be included in the base of the +// feature collection object. +func (fc FeatureCollection) MarshalBSON() ([]byte, error) { + m := newFeatureCollectionDoc(fc) + return bson.Marshal(m) +} + +func newFeatureCollectionDoc(fc FeatureCollection) map[string]interface{} { var tmp map[string]interface{} if fc.ExtraMembers != nil { tmp = fc.ExtraMembers.Clone() @@ -62,12 +80,17 @@ func (fc FeatureCollection) MarshalJSON() ([]byte, error) { tmp["features"] = fc.Features } - return marshalJSON(tmp) + return tmp } // UnmarshalJSON decodes the data into a GeoJSON feature collection. // Extra/foreign members will be put into the `ExtraMembers` attribute. 
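The `null` short-circuit added above is what the v0.11.1 changelog entry earlier in this diff refers to: unmarshalling JSON `null` into a non-pointer Feature (or FeatureCollection) now yields an empty value instead of a "not a feature" error. A sketch with a hypothetical wrapper type `record`; the sample JSON is illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/paulmach/orb/geojson"
)

// record is a hypothetical wrapper with a non-pointer Feature field.
type record struct {
	Name    string          `json:"name"`
	Feature geojson.Feature `json:"feature"`
}

func main() {
	var r record
	err := json.Unmarshal([]byte(`{"name":"x","feature":null}`), &r)
	// The null feature decodes to an empty Feature rather than an error.
	fmt.Println(err, r.Feature.Geometry) // <nil> <nil>
}
```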
func (fc *FeatureCollection) UnmarshalJSON(data []byte) error { + if bytes.Equal(data, []byte(`null`)) { + *fc = FeatureCollection{} + return nil + } + tmp := make(map[string]nocopyRawMessage, 4) err := unmarshalJSON(data, &tmp) @@ -114,6 +137,52 @@ func (fc *FeatureCollection) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal a BSON document created with bson.Marshal. +// Extra/foreign members will be put into the `ExtraMembers` attribute. +func (fc *FeatureCollection) UnmarshalBSON(data []byte) error { + tmp := make(map[string]bson.RawValue, 4) + + err := bson.Unmarshal(data, &tmp) + if err != nil { + return err + } + + *fc = FeatureCollection{} + for key, value := range tmp { + switch key { + case "type": + fc.Type, _ = bson.RawValue(value).StringValueOK() + case "bbox": + err := value.Unmarshal(&fc.BBox) + if err != nil { + return err + } + case "features": + err := value.Unmarshal(&fc.Features) + if err != nil { + return err + } + default: + if fc.ExtraMembers == nil { + fc.ExtraMembers = Properties{} + } + + var val interface{} + err := value.Unmarshal(&val) + if err != nil { + return err + } + fc.ExtraMembers[key] = val + } + } + + if fc.Type != featureCollection { + return fmt.Errorf("geojson: not a feature collection: type=%s", fc.Type) + } + + return nil +} + // UnmarshalFeatureCollection decodes the data into a GeoJSON feature collection. // Alternately one can call json.Unmarshal(fc) directly for the same result. func UnmarshalFeatureCollection(data []byte) (*FeatureCollection, error) { diff --git a/vendor/github.com/paulmach/orb/geojson/geometry.go b/vendor/github.com/paulmach/orb/geojson/geometry.go index 4574cd0..1524521 100644 --- a/vendor/github.com/paulmach/orb/geojson/geometry.go +++ b/vendor/github.com/paulmach/orb/geojson/geometry.go @@ -4,6 +4,8 @@ import ( "errors" "github.com/paulmach/orb" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/bsontype" ) // ErrInvalidGeometry will be returned if a the json of the geometry is invalid. @@ -43,7 +45,7 @@ func NewGeometry(g orb.Geometry) *Geometry { // Geometry returns the orb.Geometry for the geojson Geometry. // This will convert the "Geometries" into a orb.Collection if applicable. -func (g Geometry) Geometry() orb.Geometry { +func (g *Geometry) Geometry() orb.Geometry { if g.Coordinates != nil { return g.Coordinates } @@ -55,13 +57,40 @@ func (g Geometry) Geometry() orb.Geometry { return c } -// MarshalJSON will marshal the geometry into the correct json structure. -func (g Geometry) MarshalJSON() ([]byte, error) { +// MarshalJSON will marshal the geometry into the correct JSON structure. +func (g *Geometry) MarshalJSON() ([]byte, error) { if g.Coordinates == nil && len(g.Geometries) == 0 { return []byte(`null`), nil } - ng := &jsonGeometryMarshall{} + ng := newGeometryMarshallDoc(g) + return marshalJSON(ng) +} + +// MarshalBSON will convert the geometry into a BSON document with the structure +// of a GeoJSON Geometry. This function is used when the geometry is the top level +// document to be marshalled. +func (g *Geometry) MarshalBSON() ([]byte, error) { + ng := newGeometryMarshallDoc(g) + return bson.Marshal(ng) +} + +// MarshalBSONValue will marshal the geometry into a BSON value +// with the structure of a GeoJSON Geometry. +func (g *Geometry) MarshalBSONValue() (bsontype.Type, []byte, error) { + // implementing MarshalBSONValue allows us to marshal into a null value + // needed to match behavior with the JSON marshalling. 
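In caller terms, that means a geometry with no coordinates stored as a struct field is written as BSON `null`, mirroring the JSON output. A hypothetical sketch before the implementation below; the `placeDoc` wrapper is illustrative, and it assumes the mongo-driver encoder consults `bson.ValueMarshaler` for struct fields, as the comment above implies.

```go
package main

import (
	"fmt"

	"github.com/paulmach/orb/geojson"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsontype"
)

// placeDoc is a hypothetical wrapper document with an optional geometry field.
type placeDoc struct {
	Name string            `bson:"name"`
	Geom *geojson.Geometry `bson:"geom"`
}

func main() {
	raw, err := bson.Marshal(placeDoc{Name: "nowhere", Geom: &geojson.Geometry{}})
	if err != nil {
		panic(err)
	}

	// The empty geometry is stored as BSON null, matching the JSON behaviour.
	fmt.Println(bson.Raw(raw).Lookup("geom").Type == bsontype.Null) // true
}
```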
+ + if g.Coordinates == nil && len(g.Geometries) == 0 { + return bsontype.Null, nil, nil + } + + ng := newGeometryMarshallDoc(g) + return bson.MarshalValue(ng) +} + +func newGeometryMarshallDoc(g *Geometry) *geometryMarshallDoc { + ng := &geometryMarshallDoc{} switch g := g.Coordinates.(type) { case orb.Ring: ng.Coordinates = orb.Polygon{g} @@ -86,10 +115,10 @@ func (g Geometry) MarshalJSON() ([]byte, error) { ng.Type = orb.Collection{}.GeoJSONType() } - return marshalJSON(ng) + return ng } -// UnmarshalGeometry decodes the data into a GeoJSON feature. +// UnmarshalGeometry decodes the JSON data into a GeoJSON feature. // Alternately one can call json.Unmarshal(g) directly for the same result. func UnmarshalGeometry(data []byte) (*Geometry, error) { g := &Geometry{} @@ -101,7 +130,7 @@ func UnmarshalGeometry(data []byte) (*Geometry, error) { return g, nil } -// UnmarshalJSON will unmarshal the correct geometry from the json structure. +// UnmarshalJSON will unmarshal the correct geometry from the JSON structure. func (g *Geometry) UnmarshalJSON(data []byte) error { jg := &jsonGeometry{} err := unmarshalJSON(data, jg) @@ -113,26 +142,44 @@ func (g *Geometry) UnmarshalJSON(data []byte) error { case "Point": p := orb.Point{} err = unmarshalJSON(jg.Coordinates, &p) + if err != nil { + return err + } g.Coordinates = p case "MultiPoint": mp := orb.MultiPoint{} err = unmarshalJSON(jg.Coordinates, &mp) + if err != nil { + return err + } g.Coordinates = mp case "LineString": ls := orb.LineString{} err = unmarshalJSON(jg.Coordinates, &ls) + if err != nil { + return err + } g.Coordinates = ls case "MultiLineString": mls := orb.MultiLineString{} err = unmarshalJSON(jg.Coordinates, &mls) + if err != nil { + return err + } g.Coordinates = mls case "Polygon": p := orb.Polygon{} err = unmarshalJSON(jg.Coordinates, &p) + if err != nil { + return err + } g.Coordinates = p case "MultiPolygon": mp := orb.MultiPolygon{} err = unmarshalJSON(jg.Coordinates, &mp) + if err != nil { + return err + } g.Coordinates = mp case "GeometryCollection": g.Geometries = jg.Geometries @@ -145,6 +192,69 @@ func (g *Geometry) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal a BSON document created with bson.Marshal. +func (g *Geometry) UnmarshalBSON(data []byte) error { + bg := &bsonGeometry{} + err := bson.Unmarshal(data, bg) + if err != nil { + return err + } + + switch bg.Type { + case "Point": + p := orb.Point{} + err = bg.Coordinates.Unmarshal(&p) + if err != nil { + return err + } + g.Coordinates = p + case "MultiPoint": + mp := orb.MultiPoint{} + err = bg.Coordinates.Unmarshal(&mp) + if err != nil { + return err + } + g.Coordinates = mp + case "LineString": + ls := orb.LineString{} + + err = bg.Coordinates.Unmarshal(&ls) + if err != nil { + return err + } + g.Coordinates = ls + case "MultiLineString": + mls := orb.MultiLineString{} + err = bg.Coordinates.Unmarshal(&mls) + if err != nil { + return err + } + g.Coordinates = mls + case "Polygon": + p := orb.Polygon{} + err = bg.Coordinates.Unmarshal(&p) + if err != nil { + return err + } + g.Coordinates = p + case "MultiPolygon": + mp := orb.MultiPolygon{} + err = bg.Coordinates.Unmarshal(&mp) + if err != nil { + return err + } + g.Coordinates = mp + case "GeometryCollection": + g.Geometries = bg.Geometries + default: + return ErrInvalidGeometry + } + + g.Type = g.Geometry().GeoJSONType() + + return nil +} + // A Point is a helper type that will marshal to/from a GeoJSON Point geometry. 
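The helper types defined below wrap the orb geometries so that a bare value marshals as a complete GeoJSON geometry object. A small sketch using the `Point` helper with encoding/json; the coordinates are illustrative, and the BSON methods added in this diff behave analogously via `bson.Marshal`/`bson.Unmarshal`.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/paulmach/orb/geojson"
)

func main() {
	// The helper type marshals as a full GeoJSON geometry object,
	// not as a bare coordinate pair.
	b, _ := json.Marshal(geojson.Point{-77.0365, 38.8977})
	fmt.Println(string(b)) // {"type":"Point","coordinates":[-77.0365,38.8977]}

	var p geojson.Point
	_ = json.Unmarshal(b, &p)
	fmt.Println(p) // [-77.0365 38.8977]
}
```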
type Point orb.Point @@ -155,7 +265,12 @@ func (p Point) Geometry() orb.Geometry { // MarshalJSON will convert the Point into a GeoJSON Point geometry. func (p Point) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.Point(p)}) + return marshalJSON(&Geometry{Coordinates: orb.Point(p)}) +} + +// MarshalBSON will convert the Point into a BSON value following the GeoJSON Point structure. +func (p Point) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.Point(p)}) } // UnmarshalJSON will unmarshal the GeoJSON Point geometry. @@ -175,6 +290,23 @@ func (p *Point) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal GeoJSON Point geometry. +func (p *Point) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + point, ok := g.Coordinates.(orb.Point) + if !ok { + return errors.New("geojson: not a Point type") + } + + *p = Point(point) + return nil +} + // A MultiPoint is a helper type that will marshal to/from a GeoJSON MultiPoint geometry. type MultiPoint orb.MultiPoint @@ -185,7 +317,12 @@ func (mp MultiPoint) Geometry() orb.Geometry { // MarshalJSON will convert the MultiPoint into a GeoJSON MultiPoint geometry. func (mp MultiPoint) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.MultiPoint(mp)}) + return marshalJSON(&Geometry{Coordinates: orb.MultiPoint(mp)}) +} + +// MarshalBSON will convert the MultiPoint into a GeoJSON MultiPoint geometry BSON. +func (mp MultiPoint) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.MultiPoint(mp)}) } // UnmarshalJSON will unmarshal the GeoJSON MultiPoint geometry. @@ -205,6 +342,23 @@ func (mp *MultiPoint) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal the GeoJSON MultiPoint geometry. +func (mp *MultiPoint) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + multiPoint, ok := g.Coordinates.(orb.MultiPoint) + if !ok { + return errors.New("geojson: not a MultiPoint type") + } + + *mp = MultiPoint(multiPoint) + return nil +} + // A LineString is a helper type that will marshal to/from a GeoJSON LineString geometry. type LineString orb.LineString @@ -215,7 +369,12 @@ func (ls LineString) Geometry() orb.Geometry { // MarshalJSON will convert the LineString into a GeoJSON LineString geometry. func (ls LineString) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.LineString(ls)}) + return marshalJSON(&Geometry{Coordinates: orb.LineString(ls)}) +} + +// MarshalBSON will convert the LineString into a GeoJSON LineString geometry. +func (ls LineString) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.LineString(ls)}) } // UnmarshalJSON will unmarshal the GeoJSON MultiPoint geometry. @@ -235,6 +394,23 @@ func (ls *LineString) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal the GeoJSON MultiPoint geometry. +func (ls *LineString) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + lineString, ok := g.Coordinates.(orb.LineString) + if !ok { + return errors.New("geojson: not a LineString type") + } + + *ls = LineString(lineString) + return nil +} + // A MultiLineString is a helper type that will marshal to/from a GeoJSON MultiLineString geometry. 
type MultiLineString orb.MultiLineString @@ -245,7 +421,12 @@ func (mls MultiLineString) Geometry() orb.Geometry { // MarshalJSON will convert the MultiLineString into a GeoJSON MultiLineString geometry. func (mls MultiLineString) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.MultiLineString(mls)}) + return marshalJSON(&Geometry{Coordinates: orb.MultiLineString(mls)}) +} + +// MarshalBSON will convert the MultiLineString into a GeoJSON MultiLineString geometry. +func (mls MultiLineString) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.MultiLineString(mls)}) } // UnmarshalJSON will unmarshal the GeoJSON MultiPoint geometry. @@ -265,6 +446,23 @@ func (mls *MultiLineString) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal the GeoJSON MultiPoint geometry. +func (mls *MultiLineString) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + multilineString, ok := g.Coordinates.(orb.MultiLineString) + if !ok { + return errors.New("geojson: not a MultiLineString type") + } + + *mls = MultiLineString(multilineString) + return nil +} + // A Polygon is a helper type that will marshal to/from a GeoJSON Polygon geometry. type Polygon orb.Polygon @@ -275,7 +473,12 @@ func (p Polygon) Geometry() orb.Geometry { // MarshalJSON will convert the Polygon into a GeoJSON Polygon geometry. func (p Polygon) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.Polygon(p)}) + return marshalJSON(&Geometry{Coordinates: orb.Polygon(p)}) +} + +// MarshalBSON will convert the Polygon into a GeoJSON Polygon geometry. +func (p Polygon) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.Polygon(p)}) } // UnmarshalJSON will unmarshal the GeoJSON Polygon geometry. @@ -295,6 +498,23 @@ func (p *Polygon) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal the GeoJSON Polygon geometry. +func (p *Polygon) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + polygon, ok := g.Coordinates.(orb.Polygon) + if !ok { + return errors.New("geojson: not a Polygon type") + } + + *p = Polygon(polygon) + return nil +} + // A MultiPolygon is a helper type that will marshal to/from a GeoJSON MultiPolygon geometry. type MultiPolygon orb.MultiPolygon @@ -305,7 +525,12 @@ func (mp MultiPolygon) Geometry() orb.Geometry { // MarshalJSON will convert the MultiPolygon into a GeoJSON MultiPolygon geometry. func (mp MultiPolygon) MarshalJSON() ([]byte, error) { - return marshalJSON(Geometry{Coordinates: orb.MultiPolygon(mp)}) + return marshalJSON(&Geometry{Coordinates: orb.MultiPolygon(mp)}) +} + +// MarshalBSON will convert the MultiPolygon into a GeoJSON MultiPolygon geometry. +func (mp MultiPolygon) MarshalBSON() ([]byte, error) { + return bson.Marshal(&Geometry{Coordinates: orb.MultiPolygon(mp)}) } // UnmarshalJSON will unmarshal the GeoJSON MultiPolygon geometry. @@ -325,14 +550,37 @@ func (mp *MultiPolygon) UnmarshalJSON(data []byte) error { return nil } +// UnmarshalBSON will unmarshal the GeoJSON MultiPolygon geometry. 
+func (mp *MultiPolygon) UnmarshalBSON(data []byte) error { + g := &Geometry{} + err := bson.Unmarshal(data, &g) + if err != nil { + return err + } + + multiPolygon, ok := g.Coordinates.(orb.MultiPolygon) + if !ok { + return errors.New("geojson: not a MultiPolygon type") + } + + *mp = MultiPolygon(multiPolygon) + return nil +} + +type bsonGeometry struct { + Type string `json:"type" bson:"type"` + Coordinates bson.RawValue `json:"coordinates" bson:"coordinates"` + Geometries []*Geometry `json:"geometries,omitempty" bson:"geometries"` +} + type jsonGeometry struct { Type string `json:"type"` Coordinates nocopyRawMessage `json:"coordinates"` Geometries []*Geometry `json:"geometries,omitempty"` } -type jsonGeometryMarshall struct { - Type string `json:"type"` - Coordinates orb.Geometry `json:"coordinates,omitempty"` - Geometries []*Geometry `json:"geometries,omitempty"` +type geometryMarshallDoc struct { + Type string `json:"type" bson:"type"` + Coordinates orb.Geometry `json:"coordinates,omitempty" bson:"coordinates,omitempty"` + Geometries []*Geometry `json:"geometries,omitempty" bson:"geometries,omitempty"` } diff --git a/vendor/github.com/remyoudompheng/bigfft/README b/vendor/github.com/remyoudompheng/bigfft/README index 303c617..0fcd39d 100644 --- a/vendor/github.com/remyoudompheng/bigfft/README +++ b/vendor/github.com/remyoudompheng/bigfft/README @@ -1,3 +1,14 @@ +This library is a toy proof-of-concept implementation of the +well-known Schonhage-Strassen method for multiplying integers. +It is not expected to have a real life usecase outside number +theory computations, nor is it expected to be used in any production +system. + +If you are using it in your project, you may want to carefully +examine the actual requirement or problem you are trying to solve. + +# Comparison with the standard library and GMP + Benchmarking math/big vs. bigfft Number size old ns/op new ns/op delta diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_386.s b/vendor/github.com/remyoudompheng/bigfft/arith_386.s deleted file mode 100644 index cc50a01..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_386.s +++ /dev/null @@ -1,36 +0,0 @@ -// Trampolines to math/big assembly implementations. - -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - JMP math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -TEXT ·subVV(SB),NOSPLIT,$0 - JMP math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - JMP math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -TEXT ·subVW(SB),NOSPLIT,$0 - JMP math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - JMP math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - JMP math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - JMP math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - JMP math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_amd64.s b/vendor/github.com/remyoudompheng/bigfft/arith_amd64.s deleted file mode 100644 index 0b79335..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_amd64.s +++ /dev/null @@ -1,38 +0,0 @@ -// Trampolines to math/big assembly implementations. 
- -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - JMP math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -// (same as addVV except for SBBQ instead of ADCQ and label names) -TEXT ·subVV(SB),NOSPLIT,$0 - JMP math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - JMP math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -// (same as addVW except for SUBQ/SBBQ instead of ADDQ/ADCQ and label names) -TEXT ·subVW(SB),NOSPLIT,$0 - JMP math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - JMP math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - JMP math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - JMP math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - JMP math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_arm.s b/vendor/github.com/remyoudompheng/bigfft/arith_arm.s deleted file mode 100644 index 0ed60f5..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_arm.s +++ /dev/null @@ -1,36 +0,0 @@ -// Trampolines to math/big assembly implementations. - -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - B math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -TEXT ·subVV(SB),NOSPLIT,$0 - B math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - B math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -TEXT ·subVW(SB),NOSPLIT,$0 - B math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - B math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - B math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - B math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - B math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_arm64.s b/vendor/github.com/remyoudompheng/bigfft/arith_arm64.s deleted file mode 100644 index 0ed60f5..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_arm64.s +++ /dev/null @@ -1,36 +0,0 @@ -// Trampolines to math/big assembly implementations. 
- -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - B math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -TEXT ·subVV(SB),NOSPLIT,$0 - B math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - B math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -TEXT ·subVW(SB),NOSPLIT,$0 - B math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - B math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - B math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - B math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - B math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_decl.go b/vendor/github.com/remyoudompheng/bigfft/arith_decl.go index 7659b01..96937df 100644 --- a/vendor/github.com/remyoudompheng/bigfft/arith_decl.go +++ b/vendor/github.com/remyoudompheng/bigfft/arith_decl.go @@ -4,13 +4,30 @@ package bigfft -import . "math/big" +import ( + "math/big" + _ "unsafe" +) -// implemented in arith_$GOARCH.s +type Word = big.Word + +//go:linkname addVV math/big.addVV func addVV(z, x, y []Word) (c Word) + +//go:linkname subVV math/big.subVV func subVV(z, x, y []Word) (c Word) + +//go:linkname addVW math/big.addVW func addVW(z, x []Word, y Word) (c Word) + +//go:linkname subVW math/big.subVW func subVW(z, x []Word, y Word) (c Word) + +//go:linkname shlVU math/big.shlVU func shlVU(z, x []Word, s uint) (c Word) + +//go:linkname mulAddVWW math/big.mulAddVWW func mulAddVWW(z, x []Word, y, r Word) (c Word) + +//go:linkname addMulVVW math/big.addMulVVW func addMulVVW(z, x []Word, y Word) (c Word) diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_mips64x.s b/vendor/github.com/remyoudompheng/bigfft/arith_mips64x.s deleted file mode 100644 index 8244388..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_mips64x.s +++ /dev/null @@ -1,40 +0,0 @@ -// Trampolines to math/big assembly implementations. - -// +build mips64 mips64le - -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - JMP math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -// (same as addVV except for SBBQ instead of ADCQ and label names) -TEXT ·subVV(SB),NOSPLIT,$0 - JMP math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - JMP math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -// (same as addVW except for SUBQ/SBBQ instead of ADDQ/ADCQ and label names) -TEXT ·subVW(SB),NOSPLIT,$0 - JMP math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - JMP math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - JMP math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - JMP math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - JMP math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_mipsx.s b/vendor/github.com/remyoudompheng/bigfft/arith_mipsx.s deleted file mode 100644 index 6c0e92e..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_mipsx.s +++ /dev/null @@ -1,40 +0,0 @@ -// Trampolines to math/big assembly implementations. 
- -// +build mips mipsle - -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - JMP math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -// (same as addVV except for SBBQ instead of ADCQ and label names) -TEXT ·subVV(SB),NOSPLIT,$0 - JMP math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - JMP math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -// (same as addVW except for SUBQ/SBBQ instead of ADDQ/ADCQ and label names) -TEXT ·subVW(SB),NOSPLIT,$0 - JMP math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - JMP math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - JMP math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - JMP math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - JMP math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_ppc64x.s b/vendor/github.com/remyoudompheng/bigfft/arith_ppc64x.s deleted file mode 100644 index 16c7f15..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_ppc64x.s +++ /dev/null @@ -1,38 +0,0 @@ -// Trampolines to math/big assembly implementations. - -// +build ppc64 ppc64le - -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - BR math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -TEXT ·subVV(SB),NOSPLIT,$0 - BR math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - BR math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -TEXT ·subVW(SB),NOSPLIT,$0 - BR math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - BR math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - BR math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - BR math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - BR math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/remyoudompheng/bigfft/arith_s390x.s b/vendor/github.com/remyoudompheng/bigfft/arith_s390x.s deleted file mode 100644 index f72ab05..0000000 --- a/vendor/github.com/remyoudompheng/bigfft/arith_s390x.s +++ /dev/null @@ -1,37 +0,0 @@ - -// Trampolines to math/big assembly implementations. 
- -#include "textflag.h" - -// func addVV(z, x, y []Word) (c Word) -TEXT ·addVV(SB),NOSPLIT,$0 - BR math∕big·addVV(SB) - -// func subVV(z, x, y []Word) (c Word) -TEXT ·subVV(SB),NOSPLIT,$0 - BR math∕big·subVV(SB) - -// func addVW(z, x []Word, y Word) (c Word) -TEXT ·addVW(SB),NOSPLIT,$0 - BR math∕big·addVW(SB) - -// func subVW(z, x []Word, y Word) (c Word) -TEXT ·subVW(SB),NOSPLIT,$0 - BR math∕big·subVW(SB) - -// func shlVU(z, x []Word, s uint) (c Word) -TEXT ·shlVU(SB),NOSPLIT,$0 - BR math∕big·shlVU(SB) - -// func shrVU(z, x []Word, s uint) (c Word) -TEXT ·shrVU(SB),NOSPLIT,$0 - BR math∕big·shrVU(SB) - -// func mulAddVWW(z, x []Word, y, r Word) (c Word) -TEXT ·mulAddVWW(SB),NOSPLIT,$0 - BR math∕big·mulAddVWW(SB) - -// func addMulVVW(z, x []Word, y Word) (c Word) -TEXT ·addMulVVW(SB),NOSPLIT,$0 - BR math∕big·addMulVVW(SB) - diff --git a/vendor/github.com/tidwall/gjson/README.md b/vendor/github.com/tidwall/gjson/README.md index c8db11f..96b2e4d 100644 --- a/vendor/github.com/tidwall/gjson/README.md +++ b/vendor/github.com/tidwall/gjson/README.md @@ -211,6 +211,7 @@ There are currently the following built-in modifiers: - `@tostr`: Converts json to a string. Wraps a json string. - `@fromstr`: Converts a string from json. Unwraps a json string. - `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db). +- `@dig`: Search for a value without providing its entire path. See [e8e87f2](https://github.com/tidwall/gjson/commit/e8e87f2a00dc41f3aba5631094e21f59a8cf8cbf). ### Modifier arguments @@ -426,16 +427,6 @@ if result.Index > 0 { This is a best-effort no allocation sub slice of the original json. This method utilizes the `result.Index` field, which is the position of the raw data in the original json. It's possible that the value of `result.Index` equals zero, in which case the `result.Raw` is converted to a `[]byte`. -## Get multiple values at once - -The `GetMany` function can be used to get multiple values at the same time. - -```go -results := gjson.GetMany(json, "name.first", "name.last", "age") -``` - -The return value is a `[]Result`, which will always contain exactly the same number of items as the input paths. - ## Performance Benchmarks of GJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/), diff --git a/vendor/github.com/tidwall/gjson/SYNTAX.md b/vendor/github.com/tidwall/gjson/SYNTAX.md index 7a9b6a2..6721d7f 100644 --- a/vendor/github.com/tidwall/gjson/SYNTAX.md +++ b/vendor/github.com/tidwall/gjson/SYNTAX.md @@ -137,12 +137,21 @@ next major release.* The `~` (tilde) operator will convert a value to a boolean before comparison. 
+Supported tilde comparison type are: + +``` +~true Converts true-ish values to true +~false Converts false-ish and non-existent values to true +~null Converts null and non-existent values to true +~* Converts any existing value to true +``` + For example, using the following JSON: ```json { "vals": [ - { "a": 1, "b": true }, + { "a": 1, "b": "data" }, { "a": 2, "b": true }, { "a": 3, "b": false }, { "a": 4, "b": "0" }, @@ -157,15 +166,23 @@ For example, using the following JSON: } ``` -You can now query for all true(ish) or false(ish) values: +To query for all true-ish or false-ish values: ``` -vals.#(b==~true)#.a >> [1,2,6,7,8] +vals.#(b==~true)#.a >> [2,6,7,8] vals.#(b==~false)#.a >> [3,4,5,9,10,11] ``` The last value which was non-existent is treated as `false` +To query for null and explicit value existence: + +``` +vals.#(b==~null)#.a >> [10,11] +vals.#(b==~*)#.a >> [1,2,3,4,5,6,7,8,9,10] +vals.#(b!=~*)#.a >> [11] +``` + ### Dot vs Pipe The `.` is standard separator, but it's also possible to use a `|`. @@ -241,6 +258,7 @@ There are currently the following built-in modifiers: - `@tostr`: Converts json to a string. Wraps a json string. - `@fromstr`: Converts a string from json. Unwraps a json string. - `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db). +- `@dig`: Search for a value without providing its entire path. See [e8e87f2](https://github.com/tidwall/gjson/commit/e8e87f2a00dc41f3aba5631094e21f59a8cf8cbf). #### Modifier arguments diff --git a/vendor/github.com/tidwall/gjson/gjson.go b/vendor/github.com/tidwall/gjson/gjson.go index 53cbd23..7949825 100644 --- a/vendor/github.com/tidwall/gjson/gjson.go +++ b/vendor/github.com/tidwall/gjson/gjson.go @@ -645,9 +645,9 @@ func tostr(json string) (raw string, str string) { // Exists returns true if value exists. // -// if gjson.Get(json, "name.last").Exists(){ -// println("value exists") -// } +// if gjson.Get(json, "name.last").Exists(){ +// println("value exists") +// } func (t Result) Exists() bool { return t.Type != Null || len(t.Raw) != 0 } @@ -661,7 +661,6 @@ func (t Result) Exists() bool { // nil, for JSON null // map[string]interface{}, for JSON objects // []interface{}, for JSON arrays -// func (t Result) Value() interface{} { if t.Type == String { return t.Str @@ -826,19 +825,28 @@ func parseArrayPath(path string) (r arrayPathResult) { } // splitQuery takes a query and splits it into three parts: -// path, op, middle, and right. +// +// path, op, middle, and right. 
+// // So for this query: -// #(first_name=="Murphy").last +// +// #(first_name=="Murphy").last +// // Becomes -// first_name # path -// =="Murphy" # middle -// .last # right +// +// first_name # path +// =="Murphy" # middle +// .last # right +// // Or, -// #(service_roles.#(=="one")).cap +// +// #(service_roles.#(=="one")).cap +// // Becomes -// service_roles.#(=="one") # path -// # middle -// .cap # right +// +// service_roles.#(=="one") # path +// # middle +// .cap # right func parseQuery(query string) ( path, op, value, remain string, i int, vesc, ok bool, ) { @@ -1251,15 +1259,74 @@ func matchLimit(str, pattern string) bool { return matched } +func falseish(t Result) bool { + switch t.Type { + case Null: + return true + case False: + return true + case String: + b, err := strconv.ParseBool(strings.ToLower(t.Str)) + if err != nil { + return false + } + return !b + case Number: + return t.Num == 0 + default: + return false + } +} + +func trueish(t Result) bool { + switch t.Type { + case True: + return true + case String: + b, err := strconv.ParseBool(strings.ToLower(t.Str)) + if err != nil { + return false + } + return b + case Number: + return t.Num != 0 + default: + return false + } +} + +func nullish(t Result) bool { + return t.Type == Null +} + func queryMatches(rp *arrayPathResult, value Result) bool { rpv := rp.query.value - if len(rpv) > 0 && rpv[0] == '~' { - // convert to bool - rpv = rpv[1:] - if value.Bool() { - value = Result{Type: True} - } else { - value = Result{Type: False} + if len(rpv) > 0 { + if rpv[0] == '~' { + // convert to bool + rpv = rpv[1:] + var ish, ok bool + switch rpv { + case "*": + ish, ok = value.Exists(), true + case "null": + ish, ok = nullish(value), true + case "true": + ish, ok = trueish(value), true + case "false": + ish, ok = falseish(value), true + } + if ok { + rpv = "true" + if ish { + value = Result{Type: True} + } else { + value = Result{Type: False} + } + } else { + rpv = "" + value = Result{} + } } } if !value.Exists() { @@ -1918,23 +1985,23 @@ type parseContext struct { // the '#' character. // The dot and wildcard character can be escaped with '\'. // -// { -// "name": {"first": "Tom", "last": "Anderson"}, -// "age":37, -// "children": ["Sara","Alex","Jack"], -// "friends": [ -// {"first": "James", "last": "Murphy"}, -// {"first": "Roger", "last": "Craig"} -// ] -// } -// "name.last" >> "Anderson" -// "age" >> 37 -// "children" >> ["Sara","Alex","Jack"] -// "children.#" >> 3 -// "children.1" >> "Alex" -// "child*.2" >> "Jack" -// "c?ildren.0" >> "Sara" -// "friends.#.first" >> ["James","Roger"] +// { +// "name": {"first": "Tom", "last": "Anderson"}, +// "age":37, +// "children": ["Sara","Alex","Jack"], +// "friends": [ +// {"first": "James", "last": "Murphy"}, +// {"first": "Roger", "last": "Craig"} +// ] +// } +// "name.last" >> "Anderson" +// "age" >> 37 +// "children" >> ["Sara","Alex","Jack"] +// "children.#" >> 3 +// "children.1" >> "Alex" +// "child*.2" >> "Jack" +// "c?ildren.0" >> "Sara" +// "friends.#.first" >> ["James","Roger"] // // This function expects that the json is well-formed, and does not validate. // Invalid json will not panic, but it may return back unexpected results. @@ -2126,8 +2193,7 @@ func unescape(json string) string { // The caseSensitive paramater is used when the tokens are Strings. 
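As a usage sketch (not part of the patch) of the `trueish`/`falseish`/`nullish` handling added to `queryMatches` above, the new tilde comparisons behave like this from the caller's side:

```go
// Sketch: the extended ~true / ~null / ~* comparisons in query paths.
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const js = `{"vals":[{"a":1,"b":"data"},{"a":2,"b":true},{"a":3}]}`

	fmt.Println(gjson.Get(js, `vals.#(b==~true)#.a`).Raw) // [2]    only true-ish values
	fmt.Println(gjson.Get(js, `vals.#(b==~null)#.a`).Raw) // [3]    null or missing
	fmt.Println(gjson.Get(js, `vals.#(b==~*)#.a`).Raw)    // [1,2]  any existing value
}
```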
// The order when comparing two different type is: // -// Null < False < Number < String < True < JSON -// +// Null < False < Number < String < True < JSON func (t Result) Less(token Result, caseSensitive bool) bool { if t.Type < token.Type { return true @@ -2556,11 +2622,10 @@ func validnull(data []byte, i int) (outi int, ok bool) { // Valid returns true if the input is valid json. // -// if !gjson.Valid(json) { -// return errors.New("invalid json") -// } -// value := gjson.Get(json, "name.last") -// +// if !gjson.Valid(json) { +// return errors.New("invalid json") +// } +// value := gjson.Get(json, "name.last") func Valid(json string) bool { _, ok := validpayload(stringBytes(json), 0) return ok @@ -2568,13 +2633,12 @@ func Valid(json string) bool { // ValidBytes returns true if the input is valid json. // -// if !gjson.Valid(json) { -// return errors.New("invalid json") -// } -// value := gjson.Get(json, "name.last") +// if !gjson.Valid(json) { +// return errors.New("invalid json") +// } +// value := gjson.Get(json, "name.last") // // If working with bytes, this method preferred over ValidBytes(string(data)) -// func ValidBytes(json []byte) bool { _, ok := validpayload(json, 0) return ok @@ -2690,6 +2754,7 @@ func execModifier(json, path string) (pathOut, res string, ok bool) { var parsedArgs bool switch pathOut[0] { case '{', '[', '"': + // json arg res := Parse(pathOut) if res.Exists() { args = squash(pathOut) @@ -2698,14 +2763,20 @@ func execModifier(json, path string) (pathOut, res string, ok bool) { } } if !parsedArgs { - idx := strings.IndexByte(pathOut, '|') - if idx == -1 { - args = pathOut - pathOut = "" - } else { - args = pathOut[:idx] - pathOut = pathOut[idx:] + // simple arg + i := 0 + for ; i < len(pathOut); i++ { + if pathOut[i] == '|' { + break + } + switch pathOut[i] { + case '{', '[', '"', '(': + s := squash(pathOut[i:]) + i += len(s) - 1 + } } + args = pathOut[:i] + pathOut = pathOut[i:] } } return pathOut, fn(json, args), true @@ -2725,19 +2796,24 @@ func unwrap(json string) string { // DisableModifiers will disable the modifier syntax var DisableModifiers = false -var modifiers = map[string]func(json, arg string) string{ - "pretty": modPretty, - "ugly": modUgly, - "reverse": modReverse, - "this": modThis, - "flatten": modFlatten, - "join": modJoin, - "valid": modValid, - "keys": modKeys, - "values": modValues, - "tostr": modToStr, - "fromstr": modFromStr, - "group": modGroup, +var modifiers map[string]func(json, arg string) string + +func init() { + modifiers = map[string]func(json, arg string) string{ + "pretty": modPretty, + "ugly": modUgly, + "reverse": modReverse, + "this": modThis, + "flatten": modFlatten, + "join": modJoin, + "valid": modValid, + "keys": modKeys, + "values": modValues, + "tostr": modToStr, + "fromstr": modFromStr, + "group": modGroup, + "dig": modDig, + } } // AddModifier binds a custom modifier command to the GJSON syntax. @@ -2848,9 +2924,13 @@ func modReverse(json, arg string) string { } // @flatten an array with child arrays. -// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,[6,7]] +// +// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,[6,7]] +// // The {"deep":true} arg can be provide for deep flattening. -// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,6,7] +// +// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,6,7] +// // The original json is returned when the json is not an array. func modFlatten(json, arg string) string { res := Parse(json) @@ -2895,7 +2975,8 @@ func modFlatten(json, arg string) string { } // @keys extracts the keys from an object. 
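The modifier table is now populated in `init` so `@dig` can be registered alongside the existing built-ins; custom modifiers still hook in through `AddModifier`. A small sketch (not part of the patch), with the `@case` modifier name invented purely for illustration:

```go
// Sketch: registering a custom modifier next to the built-ins initialised above.
package main

import (
	"fmt"
	"strings"

	"github.com/tidwall/gjson"
)

func main() {
	// "case" is a hypothetical modifier name used only for this example.
	gjson.AddModifier("case", func(json, arg string) string {
		if arg == "upper" {
			return strings.ToUpper(json)
		}
		return json
	})

	fmt.Println(gjson.Get(`{"children":["Sara","Alex","Jack"]}`, "children|@case:upper"))
	// ["SARA","ALEX","JACK"]
}
```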
-// {"first":"Tom","last":"Smith"} -> ["first","last"] +// +// {"first":"Tom","last":"Smith"} -> ["first","last"] func modKeys(json, arg string) string { v := Parse(json) if !v.Exists() { @@ -2922,7 +3003,8 @@ func modKeys(json, arg string) string { } // @values extracts the values from an object. -// {"first":"Tom","last":"Smith"} -> ["Tom","Smith"] +// +// {"first":"Tom","last":"Smith"} -> ["Tom","Smith"] func modValues(json, arg string) string { v := Parse(json) if !v.Exists() { @@ -2947,11 +3029,17 @@ func modValues(json, arg string) string { } // @join multiple objects into a single object. -// [{"first":"Tom"},{"last":"Smith"}] -> {"first","Tom","last":"Smith"} +// +// [{"first":"Tom"},{"last":"Smith"}] -> {"first","Tom","last":"Smith"} +// // The arg can be "true" to specify that duplicate keys should be preserved. -// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":37,"age":41} +// +// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":37,"age":41} +// // Without preserved keys: -// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":41} +// +// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":41} +// // The original json is returned when the json is not an object. func modJoin(json, arg string) string { res := Parse(json) @@ -3024,7 +3112,8 @@ func modValid(json, arg string) string { } // @fromstr converts a string to json -// "{\"id\":1023,\"name\":\"alert\"}" -> {"id":1023,"name":"alert"} +// +// "{\"id\":1023,\"name\":\"alert\"}" -> {"id":1023,"name":"alert"} func modFromStr(json, arg string) string { if !Valid(json) { return "" @@ -3033,7 +3122,8 @@ func modFromStr(json, arg string) string { } // @tostr converts a string to json -// {"id":1023,"name":"alert"} -> "{\"id\":1023,\"name\":\"alert\"}" +// +// {"id":1023,"name":"alert"} -> "{\"id\":1023,\"name\":\"alert\"}" func modToStr(str, arg string) string { return string(AppendJSONString(nil, str)) } @@ -3210,11 +3300,11 @@ func revSquash(json string) string { // Paths returns the original GJSON paths for a Result where the Result came // from a simple query path that returns an array, like: // -// gjson.Get(json, "friends.#.first") +// gjson.Get(json, "friends.#.first") // // The returned value will be in the form of a JSON array: // -// ["friends.0.first","friends.1.first","friends.2.first"] +// ["friends.0.first","friends.1.first","friends.2.first"] // // The param 'json' must be the original JSON used when calling Get. // @@ -3239,11 +3329,11 @@ func (t Result) Paths(json string) []string { // Path returns the original GJSON path for a Result where the Result came // from a simple path that returns a single value, like: // -// gjson.Get(json, "friends.#(last=Murphy)") +// gjson.Get(json, "friends.#(last=Murphy)") // // The returned value will be in the form of a JSON string: // -// "friends.0" +// "friends.0" // // The param 'json' must be the original JSON used when calling Get. // @@ -3320,7 +3410,7 @@ func (t Result) Path(json string) string { if !rcomp.Exists() { goto fail } - comp := escapeComp(rcomp.String()) + comp := Escape(rcomp.String()) path = append(path, '.') path = append(path, comp...) } @@ -3335,17 +3425,31 @@ fail: // isSafePathKeyChar returns true if the input character is safe for not // needing escaping. 
func isSafePathKeyChar(c byte) bool { - return c <= ' ' || c > '~' || c == '_' || c == '-' || c == ':' || - (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || - (c >= '0' && c <= '9') + return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || + (c >= '0' && c <= '9') || c <= ' ' || c > '~' || c == '_' || + c == '-' || c == ':' } -// escapeComp escaped a path compontent, making it safe for generating a -// path for later use. -func escapeComp(comp string) string { +// Escape returns an escaped path component. +// +// json := `{ +// "user":{ +// "first.name": "Janet", +// "last.name": "Prichard" +// } +// }` +// user := gjson.Get(json, "user") +// println(user.Get(gjson.Escape("first.name")) +// println(user.Get(gjson.Escape("last.name")) +// // Output: +// // Janet +// // Prichard +func Escape(comp string) string { for i := 0; i < len(comp); i++ { if !isSafePathKeyChar(comp[i]) { - ncomp := []byte(comp[:i]) + ncomp := make([]byte, len(comp)+1) + copy(ncomp, comp[:i]) + ncomp = ncomp[:i] for ; i < len(comp); i++ { if !isSafePathKeyChar(comp[i]) { ncomp = append(ncomp, '\\') @@ -3357,3 +3461,30 @@ func escapeComp(comp string) string { } return comp } + +func parseRecursiveDescent(all []Result, parent Result, path string) []Result { + if res := parent.Get(path); res.Exists() { + all = append(all, res) + } + if parent.IsArray() || parent.IsObject() { + parent.ForEach(func(_, val Result) bool { + all = parseRecursiveDescent(all, val, path) + return true + }) + } + return all +} + +func modDig(json, arg string) string { + all := parseRecursiveDescent(nil, Parse(json), arg) + var out []byte + out = append(out, '[') + for i, res := range all { + if i > 0 { + out = append(out, ',') + } + out = append(out, res.Raw...) + } + out = append(out, ']') + return string(out) +} diff --git a/vendor/github.com/whosonfirst/go-whosonfirst-spr/v2/whosonfirst.go b/vendor/github.com/whosonfirst/go-whosonfirst-spr/v2/whosonfirst.go index cdd0756..0b28f91 100644 --- a/vendor/github.com/whosonfirst/go-whosonfirst-spr/v2/whosonfirst.go +++ b/vendor/github.com/whosonfirst/go-whosonfirst-spr/v2/whosonfirst.go @@ -13,6 +13,11 @@ import ( "strconv" ) +// It would be nice to be able to omit zero-length arrays (wof:belongsto, etc) +// but apparently it's harder than you think... +// https://github.com/golang/go/issues/45669 +// https://github.com/golang/go/issues/22480 + // WOFStandardPlacesResult is a struct that implements the `StandardPlacesResult` for // Who's On First GeoJSON Feature records. 
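A short sketch (not part of the patch) of the two gjson additions above: the exported `Escape` helper and the `@dig` modifier backed by `parseRecursiveDescent`:

```go
// Sketch: gjson.Escape for keys containing path characters, and @dig for
// finding a value without spelling out its full path.
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	user := gjson.Get(`{"user":{"first.name":"Janet","last.name":"Prichard"}}`, "user")
	fmt.Println(user.Get(gjson.Escape("first.name"))) // Janet

	res := gjson.Get(`{"deep":{"nested":{"lastname":"Anderson"}}}`, "@dig:lastname")
	fmt.Println(res.Raw) // ["Anderson"]
}
```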
type WOFStandardPlacesResult struct { diff --git a/vendor/github.com/whosonfirst/go-whosonfirst-uri/doc.go b/vendor/github.com/whosonfirst/go-whosonfirst-uri/doc.go index 2257177..eb16aad 100644 --- a/vendor/github.com/whosonfirst/go-whosonfirst-uri/doc.go +++ b/vendor/github.com/whosonfirst/go-whosonfirst-uri/doc.go @@ -32,7 +32,7 @@ // rel_path, _ := uri.Id2RelPath(101736545, args) // abs_path, _ := uri.Id2AbsPath("/usr/local/data", 101736545, args) // -//Produces: +// Produces: // // 101736545-alt-mapzen-display-1024.geojson // 101/736/545/101736545-alt-mapzen-display-1024.geojson diff --git a/vendor/github.com/whosonfirst/go-whosonfirst-uri/uri.go b/vendor/github.com/whosonfirst/go-whosonfirst-uri/uri.go index e457941..f5ce92b 100644 --- a/vendor/github.com/whosonfirst/go-whosonfirst-uri/uri.go +++ b/vendor/github.com/whosonfirst/go-whosonfirst-uri/uri.go @@ -95,6 +95,32 @@ func NewAlternateURIArgs(source string, function string, extras ...string) *URIA return &u } +// Return a `URIArgs` struct representing an alternate geometry derive from 'label' (which is expected to +// be the value of a valid "src:alt_label" or "src:geom_alt" property. +func NewAlternateURIArgsFromAltLabel(label string) (*URIArgs, error) { + + parts := strings.Split(label, "-") + + if len(parts) < 1 { + return nil, fmt.Errorf("Invalid alt label") + } + + source := parts[0] + function := "" + extras := make([]string, 0) + + if len(parts) > 1 { + function = parts[1] + } + + if len(parts) > 2 { + extras = parts[2:] + } + + uri_args := NewAlternateURIArgs(source, function, extras...) + return uri_args, nil +} + // See also: https://github.com/whosonfirst/whosonfirst-cookbook/blob/master/how_to/creating_alt_geometries.md // Id2Fname parses a Who's On First ID and one or more URIArgs instances (in practice just one instance) in to a filename. diff --git a/vendor/go.mongodb.org/mongo-driver/LICENSE b/vendor/go.mongodb.org/mongo-driver/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
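Stepping back to the `go-whosonfirst-uri` change above, a usage sketch (not part of the patch) of the new `NewAlternateURIArgsFromAltLabel` helper, assuming the `Id2Fname` behaviour described in the package's own comments and doc.go examples:

```go
// Sketch: deriving alternate-geometry URI args from a "src:alt_label" value.
package main

import (
	"fmt"
	"log"

	"github.com/whosonfirst/go-whosonfirst-uri"
)

func main() {
	args, err := uri.NewAlternateURIArgsFromAltLabel("mapzen-display-1024")
	if err != nil {
		log.Fatal(err)
	}

	// Expected to yield 101736545-alt-mapzen-display-1024.geojson,
	// matching the examples in the package's doc.go.
	fname, err := uri.Id2Fname(101736545, args)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(fname)
}
```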
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bson.go b/vendor/go.mongodb.org/mongo-driver/bson/bson.go new file mode 100644 index 0000000..a0d8185 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bson.go @@ -0,0 +1,50 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// +// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer +// See THIRD-PARTY-NOTICES for original license terms. + +package bson // import "go.mongodb.org/mongo-driver/bson" + +import ( + "go.mongodb.org/mongo-driver/bson/primitive" +) + +// Zeroer allows custom struct types to implement a report of zero +// state. All struct types that don't implement Zeroer or where IsZero +// returns false are considered to be not zero. 
+type Zeroer interface { + IsZero() bool +} + +// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters, +// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead. +// +// A D should not be constructed with duplicate key names, as that can cause undefined server behavior. +// +// Example usage: +// +// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}} +type D = primitive.D + +// E represents a BSON element for a D. It is usually used inside a D. +type E = primitive.E + +// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not +// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be +// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead. +// +// Example usage: +// +// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159} +type M = primitive.M + +// An A is an ordered representation of a BSON array. +// +// Example usage: +// +// bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}} +type A = primitive.A diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go new file mode 100644 index 0000000..4e24f9e --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go @@ -0,0 +1,50 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" +) + +// ArrayCodec is the Codec used for bsoncore.Array values. +type ArrayCodec struct{} + +var defaultArrayCodec = NewArrayCodec() + +// NewArrayCodec returns an ArrayCodec. +func NewArrayCodec() *ArrayCodec { + return &ArrayCodec{} +} + +// EncodeValue is the ValueEncoder for bsoncore.Array values. +func (ac *ArrayCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCoreArray { + return ValueEncoderError{Name: "CoreArrayEncodeValue", Types: []reflect.Type{tCoreArray}, Received: val} + } + + arr := val.Interface().(bsoncore.Array) + return bsonrw.Copier{}.CopyArrayFromBytes(vw, arr) +} + +// DecodeValue is the ValueDecoder for bsoncore.Array values. +func (ac *ArrayCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCoreArray { + return ValueDecoderError{Name: "CoreArrayDecodeValue", Types: []reflect.Type{tCoreArray}, Received: val} + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, 0)) + } + + val.SetLen(0) + arr, err := bsonrw.Copier{}.AppendArrayBytes(val.Interface().(bsoncore.Array), vr) + val.Set(reflect.ValueOf(arr)) + return err +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go new file mode 100644 index 0000000..098ed69 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go @@ -0,0 +1,238 @@ +// Copyright (C) MongoDB, Inc. 2017-present. 
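A minimal sketch (not part of the patch) contrasting the `D` and `M` document types defined above:

```go
// Sketch: bson.D preserves element order; bson.M is an ordinary map.
package main

import (
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
)

func main() {
	ordered := bson.D{{"name", "Tom"}, {"age", 37}} // order is kept when encoded
	unordered := bson.M{"name": "Tom", "age": 37}   // order is undefined when encoded

	for _, doc := range []interface{}{ordered, unordered} {
		raw, err := bson.Marshal(doc)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(bson.Raw(raw).String())
	}
}
```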
+// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec // import "go.mongodb.org/mongo-driver/bson/bsoncodec" + +import ( + "fmt" + "reflect" + "strings" + + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +var ( + emptyValue = reflect.Value{} +) + +// Marshaler is an interface implemented by types that can marshal themselves +// into a BSON document represented as bytes. The bytes returned must be a valid +// BSON document if the error is nil. +type Marshaler interface { + MarshalBSON() ([]byte, error) +} + +// ValueMarshaler is an interface implemented by types that can marshal +// themselves into a BSON value as bytes. The type must be the valid type for +// the bytes returned. The bytes and byte type together must be valid if the +// error is nil. +type ValueMarshaler interface { + MarshalBSONValue() (bsontype.Type, []byte, error) +} + +// Unmarshaler is an interface implemented by types that can unmarshal a BSON +// document representation of themselves. The BSON bytes can be assumed to be +// valid. UnmarshalBSON must copy the BSON bytes if it wishes to retain the data +// after returning. +type Unmarshaler interface { + UnmarshalBSON([]byte) error +} + +// ValueUnmarshaler is an interface implemented by types that can unmarshal a +// BSON value representation of themselves. The BSON bytes and type can be +// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it +// wishes to retain the data after returning. +type ValueUnmarshaler interface { + UnmarshalBSONValue(bsontype.Type, []byte) error +} + +// ValueEncoderError is an error returned from a ValueEncoder when the provided value can't be +// encoded by the ValueEncoder. +type ValueEncoderError struct { + Name string + Types []reflect.Type + Kinds []reflect.Kind + Received reflect.Value +} + +func (vee ValueEncoderError) Error() string { + typeKinds := make([]string, 0, len(vee.Types)+len(vee.Kinds)) + for _, t := range vee.Types { + typeKinds = append(typeKinds, t.String()) + } + for _, k := range vee.Kinds { + if k == reflect.Map { + typeKinds = append(typeKinds, "map[string]*") + continue + } + typeKinds = append(typeKinds, k.String()) + } + received := vee.Received.Kind().String() + if vee.Received.IsValid() { + received = vee.Received.Type().String() + } + return fmt.Sprintf("%s can only encode valid %s, but got %s", vee.Name, strings.Join(typeKinds, ", "), received) +} + +// ValueDecoderError is an error returned from a ValueDecoder when the provided value can't be +// decoded by the ValueDecoder. 
+type ValueDecoderError struct { + Name string + Types []reflect.Type + Kinds []reflect.Kind + Received reflect.Value +} + +func (vde ValueDecoderError) Error() string { + typeKinds := make([]string, 0, len(vde.Types)+len(vde.Kinds)) + for _, t := range vde.Types { + typeKinds = append(typeKinds, t.String()) + } + for _, k := range vde.Kinds { + if k == reflect.Map { + typeKinds = append(typeKinds, "map[string]*") + continue + } + typeKinds = append(typeKinds, k.String()) + } + received := vde.Received.Kind().String() + if vde.Received.IsValid() { + received = vde.Received.Type().String() + } + return fmt.Sprintf("%s can only decode valid and settable %s, but got %s", vde.Name, strings.Join(typeKinds, ", "), received) +} + +// EncodeContext is the contextual information required for a Codec to encode a +// value. +type EncodeContext struct { + *Registry + MinSize bool +} + +// DecodeContext is the contextual information required for a Codec to decode a +// value. +type DecodeContext struct { + *Registry + Truncate bool + + // Ancestor is the type of a containing document. This is mainly used to determine what type + // should be used when decoding an embedded document into an empty interface. For example, if + // Ancestor is a bson.M, BSON embedded document values being decoded into an empty interface + // will be decoded into a bson.M. + // + // Deprecated: Use DefaultDocumentM or DefaultDocumentD instead. + Ancestor reflect.Type + + // defaultDocumentType specifies the Go type to decode top-level and nested BSON documents into. In particular, the + // usage for this field is restricted to data typed as "interface{}" or "map[string]interface{}". If DocumentType is + // set to a type that a BSON document cannot be unmarshaled into (e.g. "string"), unmarshalling will result in an + // error. DocumentType overrides the Ancestor field. + defaultDocumentType reflect.Type +} + +// DefaultDocumentM will decode empty documents using the primitive.M type. This behavior is restricted to data typed as +// "interface{}" or "map[string]interface{}". +func (dc *DecodeContext) DefaultDocumentM() { + dc.defaultDocumentType = reflect.TypeOf(primitive.M{}) +} + +// DefaultDocumentD will decode empty documents using the primitive.D type. This behavior is restricted to data typed as +// "interface{}" or "map[string]interface{}". +func (dc *DecodeContext) DefaultDocumentD() { + dc.defaultDocumentType = reflect.TypeOf(primitive.D{}) +} + +// ValueCodec is the interface that groups the methods to encode and decode +// values. +type ValueCodec interface { + ValueEncoder + ValueDecoder +} + +// ValueEncoder is the interface implemented by types that can handle the encoding of a value. +type ValueEncoder interface { + EncodeValue(EncodeContext, bsonrw.ValueWriter, reflect.Value) error +} + +// ValueEncoderFunc is an adapter function that allows a function with the correct signature to be +// used as a ValueEncoder. +type ValueEncoderFunc func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error + +// EncodeValue implements the ValueEncoder interface. +func (fn ValueEncoderFunc) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + return fn(ec, vw, val) +} + +// ValueDecoder is the interface implemented by types that can handle the decoding of a value. +type ValueDecoder interface { + DecodeValue(DecodeContext, bsonrw.ValueReader, reflect.Value) error +} + +// ValueDecoderFunc is an adapter function that allows a function with the correct signature to be +// used as a ValueDecoder. 
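To illustrate the `ValueEncoderFunc` adapter and the registry plumbing described above, a sketch (not part of the patch); the `yesNo` type and its string encoding are invented for this example:

```go
// Sketch: a custom encoder registered through the ValueEncoderFunc adapter.
package main

import (
	"fmt"
	"log"
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
)

// yesNo is a hypothetical type encoded as "yes"/"no" instead of a BSON boolean.
type yesNo bool

func encodeYesNo(ec bsoncodec.EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
	if !val.IsValid() || val.Kind() != reflect.Bool {
		return bsoncodec.ValueEncoderError{Name: "YesNoEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val}
	}
	if val.Bool() {
		return vw.WriteString("yes")
	}
	return vw.WriteString("no")
}

func main() {
	rb := bsoncodec.NewRegistryBuilder()
	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)
	rb.RegisterTypeEncoder(reflect.TypeOf(yesNo(false)), bsoncodec.ValueEncoderFunc(encodeYesNo))

	raw, err := bson.MarshalWithRegistry(rb.Build(), bson.D{{"ok", yesNo(true)}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(bson.Raw(raw).String()) // {"ok": "yes"}
}
```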
+type ValueDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) error + +// DecodeValue implements the ValueDecoder interface. +func (fn ValueDecoderFunc) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + return fn(dc, vr, val) +} + +// typeDecoder is the interface implemented by types that can handle the decoding of a value given its type. +type typeDecoder interface { + decodeType(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error) +} + +// typeDecoderFunc is an adapter function that allows a function with the correct signature to be used as a typeDecoder. +type typeDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error) + +func (fn typeDecoderFunc) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + return fn(dc, vr, t) +} + +// decodeAdapter allows two functions with the correct signatures to be used as both a ValueDecoder and typeDecoder. +type decodeAdapter struct { + ValueDecoderFunc + typeDecoderFunc +} + +var _ ValueDecoder = decodeAdapter{} +var _ typeDecoder = decodeAdapter{} + +// decodeTypeOrValue calls decoder.decodeType is decoder is a typeDecoder. Otherwise, it allocates a new element of type +// t and calls decoder.DecodeValue on it. +func decodeTypeOrValue(decoder ValueDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + td, _ := decoder.(typeDecoder) + return decodeTypeOrValueWithInfo(decoder, td, dc, vr, t, true) +} + +func decodeTypeOrValueWithInfo(vd ValueDecoder, td typeDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type, convert bool) (reflect.Value, error) { + if td != nil { + val, err := td.decodeType(dc, vr, t) + if err == nil && convert && val.Type() != t { + // This conversion step is necessary for slices and maps. If a user declares variables like: + // + // type myBool bool + // var m map[string]myBool + // + // and tries to decode BSON bytes into the map, the decoding will fail if this conversion is not present + // because we'll try to assign a value of type bool to one of type myBool. + val = val.Convert(t) + } + return val, err + } + + val := reflect.New(t).Elem() + err := vd.DecodeValue(dc, vr, val) + return val, err +} + +// CodecZeroer is the interface implemented by Codecs that can also determine if +// a value of the type that would be encoded is zero. +type CodecZeroer interface { + IsTypeZero(interface{}) bool +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go new file mode 100644 index 0000000..5a916cc --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go @@ -0,0 +1,111 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "fmt" + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +// ByteSliceCodec is the Codec used for []byte values. 
+type ByteSliceCodec struct { + EncodeNilAsEmpty bool +} + +var ( + defaultByteSliceCodec = NewByteSliceCodec() + + _ ValueCodec = defaultByteSliceCodec + _ typeDecoder = defaultByteSliceCodec +) + +// NewByteSliceCodec returns a StringCodec with options opts. +func NewByteSliceCodec(opts ...*bsonoptions.ByteSliceCodecOptions) *ByteSliceCodec { + byteSliceOpt := bsonoptions.MergeByteSliceCodecOptions(opts...) + codec := ByteSliceCodec{} + if byteSliceOpt.EncodeNilAsEmpty != nil { + codec.EncodeNilAsEmpty = *byteSliceOpt.EncodeNilAsEmpty + } + return &codec +} + +// EncodeValue is the ValueEncoder for []byte. +func (bsc *ByteSliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tByteSlice { + return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + if val.IsNil() && !bsc.EncodeNilAsEmpty { + return vw.WriteNull() + } + return vw.WriteBinary(val.Interface().([]byte)) +} + +func (bsc *ByteSliceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tByteSlice { + return emptyValue, ValueDecoderError{ + Name: "ByteSliceDecodeValue", + Types: []reflect.Type{tByteSlice}, + Received: reflect.Zero(t), + } + } + + var data []byte + var err error + switch vrType := vr.Type(); vrType { + case bsontype.String: + str, err := vr.ReadString() + if err != nil { + return emptyValue, err + } + data = []byte(str) + case bsontype.Symbol: + sym, err := vr.ReadSymbol() + if err != nil { + return emptyValue, err + } + data = []byte(sym) + case bsontype.Binary: + var subtype byte + data, subtype, err = vr.ReadBinary() + if err != nil { + return emptyValue, err + } + if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld { + return emptyValue, decodeBinaryError{subtype: subtype, typeName: "[]byte"} + } + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a []byte", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(data), nil +} + +// DecodeValue is the ValueDecoder for []byte. +func (bsc *ByteSliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tByteSlice { + return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + + elem, err := bsc.decodeType(dc, vr, tByteSlice) + if err != nil { + return err + } + + val.Set(elem) + return nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go new file mode 100644 index 0000000..cb8180f --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go @@ -0,0 +1,63 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonrw" +) + +// condAddrEncoder is the encoder used when a pointer to the encoding value has an encoder. 
+type condAddrEncoder struct { + canAddrEnc ValueEncoder + elseEnc ValueEncoder +} + +var _ ValueEncoder = (*condAddrEncoder)(nil) + +// newCondAddrEncoder returns an condAddrEncoder. +func newCondAddrEncoder(canAddrEnc, elseEnc ValueEncoder) *condAddrEncoder { + encoder := condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc} + return &encoder +} + +// EncodeValue is the ValueEncoderFunc for a value that may be addressable. +func (cae *condAddrEncoder) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if val.CanAddr() { + return cae.canAddrEnc.EncodeValue(ec, vw, val) + } + if cae.elseEnc != nil { + return cae.elseEnc.EncodeValue(ec, vw, val) + } + return ErrNoEncoder{Type: val.Type()} +} + +// condAddrDecoder is the decoder used when a pointer to the value has a decoder. +type condAddrDecoder struct { + canAddrDec ValueDecoder + elseDec ValueDecoder +} + +var _ ValueDecoder = (*condAddrDecoder)(nil) + +// newCondAddrDecoder returns an CondAddrDecoder. +func newCondAddrDecoder(canAddrDec, elseDec ValueDecoder) *condAddrDecoder { + decoder := condAddrDecoder{canAddrDec: canAddrDec, elseDec: elseDec} + return &decoder +} + +// DecodeValue is the ValueDecoderFunc for a value that may be addressable. +func (cad *condAddrDecoder) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if val.CanAddr() { + return cad.canAddrDec.DecodeValue(dc, vr, val) + } + if cad.elseDec != nil { + return cad.elseDec.DecodeValue(dc, vr, val) + } + return ErrNoDecoder{Type: val.Type()} +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go new file mode 100644 index 0000000..e95cab5 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go @@ -0,0 +1,1729 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "reflect" + "strconv" + "time" + + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" +) + +var ( + defaultValueDecoders DefaultValueDecoders + errCannotTruncate = errors.New("float64 can only be truncated to an integer type when truncation is enabled") +) + +type decodeBinaryError struct { + subtype byte + typeName string +} + +func (d decodeBinaryError) Error() string { + return fmt.Sprintf("only binary values with subtype 0x00 or 0x02 can be decoded into %s, but got subtype %v", d.typeName, d.subtype) +} + +func newDefaultStructCodec() *StructCodec { + codec, err := NewStructCodec(DefaultStructTagParser) + if err != nil { + // This function is called from the codec registration path, so errors can't be propagated. If there's an error + // constructing the StructCodec, we panic to avoid losing it. + panic(fmt.Errorf("error creating default StructCodec: %v", err)) + } + return codec +} + +// DefaultValueDecoders is a namespace type for the default ValueDecoders used +// when creating a registry. 
+type DefaultValueDecoders struct{} + +// RegisterDefaultDecoders will register the decoder methods attached to DefaultValueDecoders with +// the provided RegistryBuilder. +// +// There is no support for decoding map[string]interface{} because there is no decoder for +// interface{}, so users must either register this decoder themselves or use the +// EmptyInterfaceDecoder available in the bson package. +func (dvd DefaultValueDecoders) RegisterDefaultDecoders(rb *RegistryBuilder) { + if rb == nil { + panic(errors.New("argument to RegisterDefaultDecoders must not be nil")) + } + + intDecoder := decodeAdapter{dvd.IntDecodeValue, dvd.intDecodeType} + floatDecoder := decodeAdapter{dvd.FloatDecodeValue, dvd.floatDecodeType} + + rb. + RegisterTypeDecoder(tD, ValueDecoderFunc(dvd.DDecodeValue)). + RegisterTypeDecoder(tBinary, decodeAdapter{dvd.BinaryDecodeValue, dvd.binaryDecodeType}). + RegisterTypeDecoder(tUndefined, decodeAdapter{dvd.UndefinedDecodeValue, dvd.undefinedDecodeType}). + RegisterTypeDecoder(tDateTime, decodeAdapter{dvd.DateTimeDecodeValue, dvd.dateTimeDecodeType}). + RegisterTypeDecoder(tNull, decodeAdapter{dvd.NullDecodeValue, dvd.nullDecodeType}). + RegisterTypeDecoder(tRegex, decodeAdapter{dvd.RegexDecodeValue, dvd.regexDecodeType}). + RegisterTypeDecoder(tDBPointer, decodeAdapter{dvd.DBPointerDecodeValue, dvd.dBPointerDecodeType}). + RegisterTypeDecoder(tTimestamp, decodeAdapter{dvd.TimestampDecodeValue, dvd.timestampDecodeType}). + RegisterTypeDecoder(tMinKey, decodeAdapter{dvd.MinKeyDecodeValue, dvd.minKeyDecodeType}). + RegisterTypeDecoder(tMaxKey, decodeAdapter{dvd.MaxKeyDecodeValue, dvd.maxKeyDecodeType}). + RegisterTypeDecoder(tJavaScript, decodeAdapter{dvd.JavaScriptDecodeValue, dvd.javaScriptDecodeType}). + RegisterTypeDecoder(tSymbol, decodeAdapter{dvd.SymbolDecodeValue, dvd.symbolDecodeType}). + RegisterTypeDecoder(tByteSlice, defaultByteSliceCodec). + RegisterTypeDecoder(tTime, defaultTimeCodec). + RegisterTypeDecoder(tEmpty, defaultEmptyInterfaceCodec). + RegisterTypeDecoder(tCoreArray, defaultArrayCodec). + RegisterTypeDecoder(tOID, decodeAdapter{dvd.ObjectIDDecodeValue, dvd.objectIDDecodeType}). + RegisterTypeDecoder(tDecimal, decodeAdapter{dvd.Decimal128DecodeValue, dvd.decimal128DecodeType}). + RegisterTypeDecoder(tJSONNumber, decodeAdapter{dvd.JSONNumberDecodeValue, dvd.jsonNumberDecodeType}). + RegisterTypeDecoder(tURL, decodeAdapter{dvd.URLDecodeValue, dvd.urlDecodeType}). + RegisterTypeDecoder(tCoreDocument, ValueDecoderFunc(dvd.CoreDocumentDecodeValue)). + RegisterTypeDecoder(tCodeWithScope, decodeAdapter{dvd.CodeWithScopeDecodeValue, dvd.codeWithScopeDecodeType}). + RegisterDefaultDecoder(reflect.Bool, decodeAdapter{dvd.BooleanDecodeValue, dvd.booleanDecodeType}). + RegisterDefaultDecoder(reflect.Int, intDecoder). + RegisterDefaultDecoder(reflect.Int8, intDecoder). + RegisterDefaultDecoder(reflect.Int16, intDecoder). + RegisterDefaultDecoder(reflect.Int32, intDecoder). + RegisterDefaultDecoder(reflect.Int64, intDecoder). + RegisterDefaultDecoder(reflect.Uint, defaultUIntCodec). + RegisterDefaultDecoder(reflect.Uint8, defaultUIntCodec). + RegisterDefaultDecoder(reflect.Uint16, defaultUIntCodec). + RegisterDefaultDecoder(reflect.Uint32, defaultUIntCodec). + RegisterDefaultDecoder(reflect.Uint64, defaultUIntCodec). + RegisterDefaultDecoder(reflect.Float32, floatDecoder). + RegisterDefaultDecoder(reflect.Float64, floatDecoder). + RegisterDefaultDecoder(reflect.Array, ValueDecoderFunc(dvd.ArrayDecodeValue)). 
+ RegisterDefaultDecoder(reflect.Map, defaultMapCodec). + RegisterDefaultDecoder(reflect.Slice, defaultSliceCodec). + RegisterDefaultDecoder(reflect.String, defaultStringCodec). + RegisterDefaultDecoder(reflect.Struct, newDefaultStructCodec()). + RegisterDefaultDecoder(reflect.Ptr, NewPointerCodec()). + RegisterTypeMapEntry(bsontype.Double, tFloat64). + RegisterTypeMapEntry(bsontype.String, tString). + RegisterTypeMapEntry(bsontype.Array, tA). + RegisterTypeMapEntry(bsontype.Binary, tBinary). + RegisterTypeMapEntry(bsontype.Undefined, tUndefined). + RegisterTypeMapEntry(bsontype.ObjectID, tOID). + RegisterTypeMapEntry(bsontype.Boolean, tBool). + RegisterTypeMapEntry(bsontype.DateTime, tDateTime). + RegisterTypeMapEntry(bsontype.Regex, tRegex). + RegisterTypeMapEntry(bsontype.DBPointer, tDBPointer). + RegisterTypeMapEntry(bsontype.JavaScript, tJavaScript). + RegisterTypeMapEntry(bsontype.Symbol, tSymbol). + RegisterTypeMapEntry(bsontype.CodeWithScope, tCodeWithScope). + RegisterTypeMapEntry(bsontype.Int32, tInt32). + RegisterTypeMapEntry(bsontype.Int64, tInt64). + RegisterTypeMapEntry(bsontype.Timestamp, tTimestamp). + RegisterTypeMapEntry(bsontype.Decimal128, tDecimal). + RegisterTypeMapEntry(bsontype.MinKey, tMinKey). + RegisterTypeMapEntry(bsontype.MaxKey, tMaxKey). + RegisterTypeMapEntry(bsontype.Type(0), tD). + RegisterTypeMapEntry(bsontype.EmbeddedDocument, tD). + RegisterHookDecoder(tValueUnmarshaler, ValueDecoderFunc(dvd.ValueUnmarshalerDecodeValue)). + RegisterHookDecoder(tUnmarshaler, ValueDecoderFunc(dvd.UnmarshalerDecodeValue)) +} + +// DDecodeValue is the ValueDecoderFunc for primitive.D instances. +func (dvd DefaultValueDecoders) DDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.IsValid() || !val.CanSet() || val.Type() != tD { + return ValueDecoderError{Name: "DDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case bsontype.Type(0), bsontype.EmbeddedDocument: + dc.Ancestor = tD + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + default: + return fmt.Errorf("cannot decode %v into a primitive.D", vrType) + } + + dr, err := vr.ReadDocument() + if err != nil { + return err + } + + decoder, err := dc.LookupDecoder(tEmpty) + if err != nil { + return err + } + tEmptyTypeDecoder, _ := decoder.(typeDecoder) + + // Use the elements in the provided value if it's non nil. Otherwise, allocate a new D instance. + var elems primitive.D + if !val.IsNil() { + val.SetLen(0) + elems = val.Interface().(primitive.D) + } else { + elems = make(primitive.D, 0) + } + + for { + key, elemVr, err := dr.ReadElement() + if err == bsonrw.ErrEOD { + break + } else if err != nil { + return err + } + + // Pass false for convert because we don't need to call reflect.Value.Convert for tEmpty. 
+ elem, err := decodeTypeOrValueWithInfo(decoder, tEmptyTypeDecoder, dc, elemVr, tEmpty, false) + if err != nil { + return err + } + + elems = append(elems, primitive.E{Key: key, Value: elem.Interface()}) + } + + val.Set(reflect.ValueOf(elems)) + return nil +} + +func (dvd DefaultValueDecoders) booleanDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t.Kind() != reflect.Bool { + return emptyValue, ValueDecoderError{ + Name: "BooleanDecodeValue", + Kinds: []reflect.Kind{reflect.Bool}, + Received: reflect.Zero(t), + } + } + + var b bool + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + b = (i32 != 0) + case bsontype.Int64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + b = (i64 != 0) + case bsontype.Double: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + b = (f64 != 0) + case bsontype.Boolean: + b, err = vr.ReadBoolean() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a boolean", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(b), nil +} + +// BooleanDecodeValue is the ValueDecoderFunc for bool types. +func (dvd DefaultValueDecoders) BooleanDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.IsValid() || !val.CanSet() || val.Kind() != reflect.Bool { + return ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val} + } + + elem, err := dvd.booleanDecodeType(dctx, vr, val.Type()) + if err != nil { + return err + } + + val.SetBool(elem.Bool()) + return nil +} + +func (DefaultValueDecoders) intDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + var i64 int64 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + i64 = int64(i32) + case bsontype.Int64: + i64, err = vr.ReadInt64() + if err != nil { + return emptyValue, err + } + case bsontype.Double: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + if !dc.Truncate && math.Floor(f64) != f64 { + return emptyValue, errCannotTruncate + } + if f64 > float64(math.MaxInt64) { + return emptyValue, fmt.Errorf("%g overflows int64", f64) + } + i64 = int64(f64) + case bsontype.Boolean: + b, err := vr.ReadBoolean() + if err != nil { + return emptyValue, err + } + if b { + i64 = 1 + } + case bsontype.Null: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType) + } + + switch t.Kind() { + case reflect.Int8: + if i64 < math.MinInt8 || i64 > math.MaxInt8 { + return emptyValue, fmt.Errorf("%d overflows int8", i64) + } + + return reflect.ValueOf(int8(i64)), nil + case reflect.Int16: + if i64 < math.MinInt16 || i64 > math.MaxInt16 { + return emptyValue, fmt.Errorf("%d overflows int16", i64) + } + + return reflect.ValueOf(int16(i64)), nil + case reflect.Int32: + if i64 < math.MinInt32 || i64 > math.MaxInt32 { + return emptyValue, fmt.Errorf("%d overflows int32", i64) + } + + return reflect.ValueOf(int32(i64)), nil + case reflect.Int64: + return 
reflect.ValueOf(i64), nil + case reflect.Int: + if int64(int(i64)) != i64 { // Can we fit this inside of an int + return emptyValue, fmt.Errorf("%d overflows int", i64) + } + + return reflect.ValueOf(int(i64)), nil + default: + return emptyValue, ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.Zero(t), + } + } +} + +// IntDecodeValue is the ValueDecoderFunc for int types. +func (dvd DefaultValueDecoders) IntDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() { + return ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: val, + } + } + + elem, err := dvd.intDecodeType(dc, vr, val.Type()) + if err != nil { + return err + } + + val.SetInt(elem.Int()) + return nil +} + +// UintDecodeValue is the ValueDecoderFunc for uint types. +// +// Deprecated: UintDecodeValue is not registered by default. Use UintCodec.DecodeValue instead. +func (dvd DefaultValueDecoders) UintDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + var i64 int64 + var err error + switch vr.Type() { + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return err + } + i64 = int64(i32) + case bsontype.Int64: + i64, err = vr.ReadInt64() + if err != nil { + return err + } + case bsontype.Double: + f64, err := vr.ReadDouble() + if err != nil { + return err + } + if !dc.Truncate && math.Floor(f64) != f64 { + return errors.New("UintDecodeValue can only truncate float64 to an integer type when truncation is enabled") + } + if f64 > float64(math.MaxInt64) { + return fmt.Errorf("%g overflows int64", f64) + } + i64 = int64(f64) + case bsontype.Boolean: + b, err := vr.ReadBoolean() + if err != nil { + return err + } + if b { + i64 = 1 + } + default: + return fmt.Errorf("cannot decode %v into an integer type", vr.Type()) + } + + if !val.CanSet() { + return ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: val, + } + } + + switch val.Kind() { + case reflect.Uint8: + if i64 < 0 || i64 > math.MaxUint8 { + return fmt.Errorf("%d overflows uint8", i64) + } + case reflect.Uint16: + if i64 < 0 || i64 > math.MaxUint16 { + return fmt.Errorf("%d overflows uint16", i64) + } + case reflect.Uint32: + if i64 < 0 || i64 > math.MaxUint32 { + return fmt.Errorf("%d overflows uint32", i64) + } + case reflect.Uint64: + if i64 < 0 { + return fmt.Errorf("%d overflows uint64", i64) + } + case reflect.Uint: + if i64 < 0 || int64(uint(i64)) != i64 { // Can we fit this inside of an uint + return fmt.Errorf("%d overflows uint", i64) + } + default: + return ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: val, + } + } + + val.SetUint(uint64(i64)) + return nil +} + +func (dvd DefaultValueDecoders) floatDecodeType(ec DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + var f float64 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + f = float64(i32) + case bsontype.Int64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + f = float64(i64) + case bsontype.Double: + f, err = vr.ReadDouble() + 
if err != nil { + return emptyValue, err + } + case bsontype.Boolean: + b, err := vr.ReadBoolean() + if err != nil { + return emptyValue, err + } + if b { + f = 1 + } + case bsontype.Null: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into a float32 or float64 type", vrType) + } + + switch t.Kind() { + case reflect.Float32: + if !ec.Truncate && float64(float32(f)) != f { + return emptyValue, errCannotTruncate + } + + return reflect.ValueOf(float32(f)), nil + case reflect.Float64: + return reflect.ValueOf(f), nil + default: + return emptyValue, ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.Zero(t), + } + } +} + +// FloatDecodeValue is the ValueDecoderFunc for float types. +func (dvd DefaultValueDecoders) FloatDecodeValue(ec DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() { + return ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: val, + } + } + + elem, err := dvd.floatDecodeType(ec, vr, val.Type()) + if err != nil { + return err + } + + val.SetFloat(elem.Float()) + return nil +} + +// StringDecodeValue is the ValueDecoderFunc for string types. +// +// Deprecated: StringDecodeValue is not registered by default. Use StringCodec.DecodeValue instead. +func (dvd DefaultValueDecoders) StringDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + var str string + var err error + switch vr.Type() { + // TODO(GODRIVER-577): Handle JavaScript and Symbol BSON types when allowed. + case bsontype.String: + str, err = vr.ReadString() + if err != nil { + return err + } + default: + return fmt.Errorf("cannot decode %v into a string type", vr.Type()) + } + if !val.CanSet() || val.Kind() != reflect.String { + return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val} + } + + val.SetString(str) + return nil +} + +func (DefaultValueDecoders) javaScriptDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tJavaScript { + return emptyValue, ValueDecoderError{ + Name: "JavaScriptDecodeValue", + Types: []reflect.Type{tJavaScript}, + Received: reflect.Zero(t), + } + } + + var js string + var err error + switch vrType := vr.Type(); vrType { + case bsontype.JavaScript: + js, err = vr.ReadJavascript() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a primitive.JavaScript", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.JavaScript(js)), nil +} + +// JavaScriptDecodeValue is the ValueDecoderFunc for the primitive.JavaScript type. 
+func (dvd DefaultValueDecoders) JavaScriptDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tJavaScript { + return ValueDecoderError{Name: "JavaScriptDecodeValue", Types: []reflect.Type{tJavaScript}, Received: val} + } + + elem, err := dvd.javaScriptDecodeType(dctx, vr, tJavaScript) + if err != nil { + return err + } + + val.SetString(elem.String()) + return nil +} + +func (DefaultValueDecoders) symbolDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tSymbol { + return emptyValue, ValueDecoderError{ + Name: "SymbolDecodeValue", + Types: []reflect.Type{tSymbol}, + Received: reflect.Zero(t), + } + } + + var symbol string + var err error + switch vrType := vr.Type(); vrType { + case bsontype.String: + symbol, err = vr.ReadString() + case bsontype.Symbol: + symbol, err = vr.ReadSymbol() + case bsontype.Binary: + data, subtype, err := vr.ReadBinary() + if err != nil { + return emptyValue, err + } + + if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld { + return emptyValue, decodeBinaryError{subtype: subtype, typeName: "primitive.Symbol"} + } + symbol = string(data) + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Symbol", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Symbol(symbol)), nil +} + +// SymbolDecodeValue is the ValueDecoderFunc for the primitive.Symbol type. +func (dvd DefaultValueDecoders) SymbolDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tSymbol { + return ValueDecoderError{Name: "SymbolDecodeValue", Types: []reflect.Type{tSymbol}, Received: val} + } + + elem, err := dvd.symbolDecodeType(dctx, vr, tSymbol) + if err != nil { + return err + } + + val.SetString(elem.String()) + return nil +} + +func (DefaultValueDecoders) binaryDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tBinary { + return emptyValue, ValueDecoderError{ + Name: "BinaryDecodeValue", + Types: []reflect.Type{tBinary}, + Received: reflect.Zero(t), + } + } + + var data []byte + var subtype byte + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Binary: + data, subtype, err = vr.ReadBinary() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Binary", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Binary{Subtype: subtype, Data: data}), nil +} + +// BinaryDecodeValue is the ValueDecoderFunc for Binary. 
+func (dvd DefaultValueDecoders) BinaryDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tBinary { + return ValueDecoderError{Name: "BinaryDecodeValue", Types: []reflect.Type{tBinary}, Received: val} + } + + elem, err := dvd.binaryDecodeType(dc, vr, tBinary) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) undefinedDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tUndefined { + return emptyValue, ValueDecoderError{ + Name: "UndefinedDecodeValue", + Types: []reflect.Type{tUndefined}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Undefined: + err = vr.ReadUndefined() + case bsontype.Null: + err = vr.ReadNull() + default: + return emptyValue, fmt.Errorf("cannot decode %v into an Undefined", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Undefined{}), nil +} + +// UndefinedDecodeValue is the ValueDecoderFunc for Undefined. +func (dvd DefaultValueDecoders) UndefinedDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tUndefined { + return ValueDecoderError{Name: "UndefinedDecodeValue", Types: []reflect.Type{tUndefined}, Received: val} + } + + elem, err := dvd.undefinedDecodeType(dc, vr, tUndefined) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// Accept both 12-byte string and pretty-printed 24-byte hex string formats. +func (dvd DefaultValueDecoders) objectIDDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tOID { + return emptyValue, ValueDecoderError{ + Name: "ObjectIDDecodeValue", + Types: []reflect.Type{tOID}, + Received: reflect.Zero(t), + } + } + + var oid primitive.ObjectID + var err error + switch vrType := vr.Type(); vrType { + case bsontype.ObjectID: + oid, err = vr.ReadObjectID() + if err != nil { + return emptyValue, err + } + case bsontype.String: + str, err := vr.ReadString() + if err != nil { + return emptyValue, err + } + if oid, err = primitive.ObjectIDFromHex(str); err == nil { + break + } + if len(str) != 12 { + return emptyValue, fmt.Errorf("an ObjectID string must be exactly 12 bytes long (got %v)", len(str)) + } + byteArr := []byte(str) + copy(oid[:], byteArr) + case bsontype.Null: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into an ObjectID", vrType) + } + + return reflect.ValueOf(oid), nil +} + +// ObjectIDDecodeValue is the ValueDecoderFunc for primitive.ObjectID. 
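+//
+// Illustrative sketch, not part of the upstream mongo-driver source: because of the
+// string handling in objectIDDecodeType above, a 24-character hex string and a raw
+// 12-byte string both decode into a primitive.ObjectID field. Assuming the bson and
+// primitive packages are imported and using a hypothetical hex value:
+//
+//	var out struct {
+//		ID primitive.ObjectID `bson:"_id"`
+//	}
+//	doc, _ := bson.Marshal(bson.M{"_id": "633c0d0e1f2a3b4c5d6e7f80"}) // 24 hex characters
+//	_ = bson.Unmarshal(doc, &out)
+//	// out.ID holds the value parsed by primitive.ObjectIDFromHex; a 12-byte string
+//	// would instead be copied into the ObjectID bytes directly.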
+func (dvd DefaultValueDecoders) ObjectIDDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tOID { + return ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}, Received: val} + } + + elem, err := dvd.objectIDDecodeType(dc, vr, tOID) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) dateTimeDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDateTime { + return emptyValue, ValueDecoderError{ + Name: "DateTimeDecodeValue", + Types: []reflect.Type{tDateTime}, + Received: reflect.Zero(t), + } + } + + var dt int64 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.DateTime: + dt, err = vr.ReadDateTime() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a DateTime", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.DateTime(dt)), nil +} + +// DateTimeDecodeValue is the ValueDecoderFunc for DateTime. +func (dvd DefaultValueDecoders) DateTimeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDateTime { + return ValueDecoderError{Name: "DateTimeDecodeValue", Types: []reflect.Type{tDateTime}, Received: val} + } + + elem, err := dvd.dateTimeDecodeType(dc, vr, tDateTime) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) nullDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tNull { + return emptyValue, ValueDecoderError{ + Name: "NullDecodeValue", + Types: []reflect.Type{tNull}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Undefined: + err = vr.ReadUndefined() + case bsontype.Null: + err = vr.ReadNull() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Null", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Null{}), nil +} + +// NullDecodeValue is the ValueDecoderFunc for Null. +func (dvd DefaultValueDecoders) NullDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tNull { + return ValueDecoderError{Name: "NullDecodeValue", Types: []reflect.Type{tNull}, Received: val} + } + + elem, err := dvd.nullDecodeType(dc, vr, tNull) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) regexDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tRegex { + return emptyValue, ValueDecoderError{ + Name: "RegexDecodeValue", + Types: []reflect.Type{tRegex}, + Received: reflect.Zero(t), + } + } + + var pattern, options string + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Regex: + pattern, options, err = vr.ReadRegex() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Regex", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Regex{Pattern: pattern, Options: options}), nil +} + +// RegexDecodeValue is the ValueDecoderFunc for Regex. 
+func (dvd DefaultValueDecoders) RegexDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tRegex { + return ValueDecoderError{Name: "RegexDecodeValue", Types: []reflect.Type{tRegex}, Received: val} + } + + elem, err := dvd.regexDecodeType(dc, vr, tRegex) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) dBPointerDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDBPointer { + return emptyValue, ValueDecoderError{ + Name: "DBPointerDecodeValue", + Types: []reflect.Type{tDBPointer}, + Received: reflect.Zero(t), + } + } + + var ns string + var pointer primitive.ObjectID + var err error + switch vrType := vr.Type(); vrType { + case bsontype.DBPointer: + ns, pointer, err = vr.ReadDBPointer() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a DBPointer", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.DBPointer{DB: ns, Pointer: pointer}), nil +} + +// DBPointerDecodeValue is the ValueDecoderFunc for DBPointer. +func (dvd DefaultValueDecoders) DBPointerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDBPointer { + return ValueDecoderError{Name: "DBPointerDecodeValue", Types: []reflect.Type{tDBPointer}, Received: val} + } + + elem, err := dvd.dBPointerDecodeType(dc, vr, tDBPointer) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) timestampDecodeType(dc DecodeContext, vr bsonrw.ValueReader, reflectType reflect.Type) (reflect.Value, error) { + if reflectType != tTimestamp { + return emptyValue, ValueDecoderError{ + Name: "TimestampDecodeValue", + Types: []reflect.Type{tTimestamp}, + Received: reflect.Zero(reflectType), + } + } + + var t, incr uint32 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Timestamp: + t, incr, err = vr.ReadTimestamp() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Timestamp", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.Timestamp{T: t, I: incr}), nil +} + +// TimestampDecodeValue is the ValueDecoderFunc for Timestamp. 
+func (dvd DefaultValueDecoders) TimestampDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tTimestamp { + return ValueDecoderError{Name: "TimestampDecodeValue", Types: []reflect.Type{tTimestamp}, Received: val} + } + + elem, err := dvd.timestampDecodeType(dc, vr, tTimestamp) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) minKeyDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tMinKey { + return emptyValue, ValueDecoderError{ + Name: "MinKeyDecodeValue", + Types: []reflect.Type{tMinKey}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case bsontype.MinKey: + err = vr.ReadMinKey() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a MinKey", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.MinKey{}), nil +} + +// MinKeyDecodeValue is the ValueDecoderFunc for MinKey. +func (dvd DefaultValueDecoders) MinKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tMinKey { + return ValueDecoderError{Name: "MinKeyDecodeValue", Types: []reflect.Type{tMinKey}, Received: val} + } + + elem, err := dvd.minKeyDecodeType(dc, vr, tMinKey) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (DefaultValueDecoders) maxKeyDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tMaxKey { + return emptyValue, ValueDecoderError{ + Name: "MaxKeyDecodeValue", + Types: []reflect.Type{tMaxKey}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case bsontype.MaxKey: + err = vr.ReadMaxKey() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a MaxKey", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(primitive.MaxKey{}), nil +} + +// MaxKeyDecodeValue is the ValueDecoderFunc for MaxKey. +func (dvd DefaultValueDecoders) MaxKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tMaxKey { + return ValueDecoderError{Name: "MaxKeyDecodeValue", Types: []reflect.Type{tMaxKey}, Received: val} + } + + elem, err := dvd.maxKeyDecodeType(dc, vr, tMaxKey) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (dvd DefaultValueDecoders) decimal128DecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDecimal { + return emptyValue, ValueDecoderError{ + Name: "Decimal128DecodeValue", + Types: []reflect.Type{tDecimal}, + Received: reflect.Zero(t), + } + } + + var d128 primitive.Decimal128 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Decimal128: + d128, err = vr.ReadDecimal128() + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Decimal128", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(d128), nil +} + +// Decimal128DecodeValue is the ValueDecoderFunc for primitive.Decimal128. 
+func (dvd DefaultValueDecoders) Decimal128DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDecimal { + return ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}, Received: val} + } + + elem, err := dvd.decimal128DecodeType(dctx, vr, tDecimal) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (dvd DefaultValueDecoders) jsonNumberDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tJSONNumber { + return emptyValue, ValueDecoderError{ + Name: "JSONNumberDecodeValue", + Types: []reflect.Type{tJSONNumber}, + Received: reflect.Zero(t), + } + } + + var jsonNum json.Number + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Double: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatFloat(f64, 'f', -1, 64)) + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatInt(int64(i32), 10)) + case bsontype.Int64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatInt(i64, 10)) + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a json.Number", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(jsonNum), nil +} + +// JSONNumberDecodeValue is the ValueDecoderFunc for json.Number. +func (dvd DefaultValueDecoders) JSONNumberDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tJSONNumber { + return ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}, Received: val} + } + + elem, err := dvd.jsonNumberDecodeType(dc, vr, tJSONNumber) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (dvd DefaultValueDecoders) urlDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tURL { + return emptyValue, ValueDecoderError{ + Name: "URLDecodeValue", + Types: []reflect.Type{tURL}, + Received: reflect.Zero(t), + } + } + + urlPtr := &url.URL{} + var err error + switch vrType := vr.Type(); vrType { + case bsontype.String: + var str string // Declare str here to avoid shadowing err during the ReadString call. + str, err = vr.ReadString() + if err != nil { + return emptyValue, err + } + + urlPtr, err = url.Parse(str) + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a *url.URL", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(urlPtr).Elem(), nil +} + +// URLDecodeValue is the ValueDecoderFunc for url.URL. +func (dvd DefaultValueDecoders) URLDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tURL { + return ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}, Received: val} + } + + elem, err := dvd.urlDecodeType(dc, vr, tURL) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// TimeDecodeValue is the ValueDecoderFunc for time.Time. +// +// Deprecated: TimeDecodeValue is not registered by default. Use TimeCodec.DecodeValue instead. 
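+//
+// Illustrative note, not part of the upstream source: a BSON DateTime is a count of
+// milliseconds since the Unix epoch, so the conversion used below amounts to:
+//
+//	dt := int64(1577934245123) // hypothetical DateTime value in milliseconds
+//	t := time.Unix(dt/1000, dt%1000*1000000).UTC()
+//	// t is 2020-01-02 03:04:05.123 +0000 UTC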
+func (dvd DefaultValueDecoders) TimeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if vr.Type() != bsontype.DateTime { + return fmt.Errorf("cannot decode %v into a time.Time", vr.Type()) + } + + dt, err := vr.ReadDateTime() + if err != nil { + return err + } + + if !val.CanSet() || val.Type() != tTime { + return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val} + } + + val.Set(reflect.ValueOf(time.Unix(dt/1000, dt%1000*1000000).UTC())) + return nil +} + +// ByteSliceDecodeValue is the ValueDecoderFunc for []byte. +// +// Deprecated: ByteSliceDecodeValue is not registered by default. Use ByteSliceCodec.DecodeValue instead. +func (dvd DefaultValueDecoders) ByteSliceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if vr.Type() != bsontype.Binary && vr.Type() != bsontype.Null { + return fmt.Errorf("cannot decode %v into a []byte", vr.Type()) + } + + if !val.CanSet() || val.Type() != tByteSlice { + return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + + if vr.Type() == bsontype.Null { + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + } + + data, subtype, err := vr.ReadBinary() + if err != nil { + return err + } + if subtype != 0x00 { + return fmt.Errorf("ByteSliceDecodeValue can only be used to decode subtype 0x00 for %s, got %v", bsontype.Binary, subtype) + } + + val.Set(reflect.ValueOf(data)) + return nil +} + +// MapDecodeValue is the ValueDecoderFunc for map[string]* types. +// +// Deprecated: MapDecodeValue is not registered by default. Use MapCodec.DecodeValue instead. +func (dvd DefaultValueDecoders) MapDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String { + return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val} + } + + switch vr.Type() { + case bsontype.Type(0), bsontype.EmbeddedDocument: + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + default: + return fmt.Errorf("cannot decode %v into a %s", vr.Type(), val.Type()) + } + + dr, err := vr.ReadDocument() + if err != nil { + return err + } + + if val.IsNil() { + val.Set(reflect.MakeMap(val.Type())) + } + + eType := val.Type().Elem() + decoder, err := dc.LookupDecoder(eType) + if err != nil { + return err + } + + if eType == tEmpty { + dc.Ancestor = val.Type() + } + + keyType := val.Type().Key() + for { + key, vr, err := dr.ReadElement() + if err == bsonrw.ErrEOD { + break + } + if err != nil { + return err + } + + elem := reflect.New(eType).Elem() + + err = decoder.DecodeValue(dc, vr, elem) + if err != nil { + return err + } + + val.SetMapIndex(reflect.ValueOf(key).Convert(keyType), elem) + } + return nil +} + +// ArrayDecodeValue is the ValueDecoderFunc for array types. 
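+//
+// Illustrative sketch, not part of the upstream source: one notable case handled
+// below is decoding a BSON binary value (subtype 0x00 or 0x02) into a fixed-size
+// byte array. Assuming the bson package is imported; the struct and field names are
+// hypothetical:
+//
+//	var out struct {
+//		Sum [4]byte `bson:"sum"`
+//	}
+//	doc, _ := bson.Marshal(bson.M{"sum": []byte{0xde, 0xad, 0xbe, 0xef}})
+//	_ = bson.Unmarshal(doc, &out) // out.Sum == [4]byte{0xde, 0xad, 0xbe, 0xef}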
+func (dvd DefaultValueDecoders) ArrayDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Array { + return ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case bsontype.Array: + case bsontype.Type(0), bsontype.EmbeddedDocument: + if val.Type().Elem() != tE { + return fmt.Errorf("cannot decode document into %s", val.Type()) + } + case bsontype.Binary: + if val.Type().Elem() != tByte { + return fmt.Errorf("ArrayDecodeValue can only be used to decode binary into a byte array, got %v", vrType) + } + data, subtype, err := vr.ReadBinary() + if err != nil { + return err + } + if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld { + return fmt.Errorf("ArrayDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype) + } + + if len(data) > val.Len() { + return fmt.Errorf("more elements returned in array than can fit inside %s", val.Type()) + } + + for idx, elem := range data { + val.Index(idx).Set(reflect.ValueOf(elem)) + } + return nil + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + case bsontype.Undefined: + val.Set(reflect.Zero(val.Type())) + return vr.ReadUndefined() + default: + return fmt.Errorf("cannot decode %v into an array", vrType) + } + + var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error) + switch val.Type().Elem() { + case tE: + elemsFunc = dvd.decodeD + default: + elemsFunc = dvd.decodeDefault + } + + elems, err := elemsFunc(dc, vr, val) + if err != nil { + return err + } + + if len(elems) > val.Len() { + return fmt.Errorf("more elements returned in array than can fit inside %s, got %v elements", val.Type(), len(elems)) + } + + for idx, elem := range elems { + val.Index(idx).Set(elem) + } + + return nil +} + +// SliceDecodeValue is the ValueDecoderFunc for slice types. +// +// Deprecated: SliceDecodeValue is not registered by default. Use SliceCodec.DecodeValue instead. +func (dvd DefaultValueDecoders) SliceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.Slice { + return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + switch vr.Type() { + case bsontype.Array: + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + case bsontype.Type(0), bsontype.EmbeddedDocument: + if val.Type().Elem() != tE { + return fmt.Errorf("cannot decode document into %s", val.Type()) + } + default: + return fmt.Errorf("cannot decode %v into a slice", vr.Type()) + } + + var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error) + switch val.Type().Elem() { + case tE: + dc.Ancestor = val.Type() + elemsFunc = dvd.decodeD + default: + elemsFunc = dvd.decodeDefault + } + + elems, err := elemsFunc(dc, vr, val) + if err != nil { + return err + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, len(elems))) + } + + val.SetLen(0) + val.Set(reflect.Append(val, elems...)) + + return nil +} + +// ValueUnmarshalerDecodeValue is the ValueDecoderFunc for ValueUnmarshaler implementations. 
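+//
+// Illustrative sketch, not part of the upstream source: a type is routed through this
+// hook when it, or a pointer to it, implements bson.ValueUnmarshaler. The YesNo type
+// is hypothetical; the sketch assumes the bsontype and bsoncore packages plus errors
+// and fmt are imported:
+//
+//	type YesNo bool
+//
+//	func (yn *YesNo) UnmarshalBSONValue(t bsontype.Type, data []byte) error {
+//		if t != bsontype.String {
+//			return fmt.Errorf("expected string, got %v", t)
+//		}
+//		s, _, ok := bsoncore.ReadString(data)
+//		if !ok {
+//			return errors.New("malformed string value")
+//		}
+//		*yn = YesNo(s == "yes")
+//		return nil
+//	}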
+func (dvd DefaultValueDecoders) ValueUnmarshalerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.IsValid() || (!val.Type().Implements(tValueUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tValueUnmarshaler)) { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + + if val.Kind() == reflect.Ptr && val.IsNil() { + if !val.CanSet() { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + val.Set(reflect.New(val.Type().Elem())) + } + + if !val.Type().Implements(tValueUnmarshaler) { + if !val.CanAddr() { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + val = val.Addr() // If the type doesn't implement the interface, a pointer to it must. + } + + t, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) + if err != nil { + return err + } + + fn := val.Convert(tValueUnmarshaler).MethodByName("UnmarshalBSONValue") + errVal := fn.Call([]reflect.Value{reflect.ValueOf(t), reflect.ValueOf(src)})[0] + if !errVal.IsNil() { + return errVal.Interface().(error) + } + return nil +} + +// UnmarshalerDecodeValue is the ValueDecoderFunc for Unmarshaler implementations. +func (dvd DefaultValueDecoders) UnmarshalerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.IsValid() || (!val.Type().Implements(tUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tUnmarshaler)) { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + + if val.Kind() == reflect.Ptr && val.IsNil() { + if !val.CanSet() { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + val.Set(reflect.New(val.Type().Elem())) + } + + _, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) + if err != nil { + return err + } + + // If the target Go value is a pointer and the BSON field value is empty, set the value to the + // zero value of the pointer (nil) and don't call UnmarshalBSON. UnmarshalBSON has no way to + // change the pointer value from within the function (only the value at the pointer address), + // so it can't set the pointer to "nil" itself. Since the most common Go value for an empty BSON + // field value is "nil", we set "nil" here and don't call UnmarshalBSON. This behavior matches + // the behavior of the Go "encoding/json" unmarshaler when the target Go value is a pointer and + // the JSON field value is "null". + if val.Kind() == reflect.Ptr && len(src) == 0 { + val.Set(reflect.Zero(val.Type())) + return nil + } + + if !val.Type().Implements(tUnmarshaler) { + if !val.CanAddr() { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + val = val.Addr() // If the type doesn't implement the interface, a pointer to it must. + } + + fn := val.Convert(tUnmarshaler).MethodByName("UnmarshalBSON") + errVal := fn.Call([]reflect.Value{reflect.ValueOf(src)})[0] + if !errVal.IsNil() { + return errVal.Interface().(error) + } + return nil +} + +// EmptyInterfaceDecodeValue is the ValueDecoderFunc for interface{}. +// +// Deprecated: EmptyInterfaceDecodeValue is not registered by default. Use EmptyInterfaceCodec.DecodeValue instead. 
+func (dvd DefaultValueDecoders) EmptyInterfaceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tEmpty { + return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + rtype, err := dc.LookupTypeMapEntry(vr.Type()) + if err != nil { + switch vr.Type() { + case bsontype.EmbeddedDocument: + if dc.Ancestor != nil { + rtype = dc.Ancestor + break + } + rtype = tD + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + default: + return err + } + } + + decoder, err := dc.LookupDecoder(rtype) + if err != nil { + return err + } + + elem := reflect.New(rtype).Elem() + err = decoder.DecodeValue(dc, vr, elem) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// CoreDocumentDecodeValue is the ValueDecoderFunc for bsoncore.Document. +func (DefaultValueDecoders) CoreDocumentDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCoreDocument { + return ValueDecoderError{Name: "CoreDocumentDecodeValue", Types: []reflect.Type{tCoreDocument}, Received: val} + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, 0)) + } + + val.SetLen(0) + + cdoc, err := bsonrw.Copier{}.AppendDocumentBytes(val.Interface().(bsoncore.Document), vr) + val.Set(reflect.ValueOf(cdoc)) + return err +} + +func (dvd DefaultValueDecoders) decodeDefault(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) ([]reflect.Value, error) { + elems := make([]reflect.Value, 0) + + ar, err := vr.ReadArray() + if err != nil { + return nil, err + } + + eType := val.Type().Elem() + + decoder, err := dc.LookupDecoder(eType) + if err != nil { + return nil, err + } + eTypeDecoder, _ := decoder.(typeDecoder) + + idx := 0 + for { + vr, err := ar.ReadValue() + if err == bsonrw.ErrEOA { + break + } + if err != nil { + return nil, err + } + + elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true) + if err != nil { + return nil, newDecodeError(strconv.Itoa(idx), err) + } + elems = append(elems, elem) + idx++ + } + + return elems, nil +} + +func (dvd DefaultValueDecoders) readCodeWithScope(dc DecodeContext, vr bsonrw.ValueReader) (primitive.CodeWithScope, error) { + var cws primitive.CodeWithScope + + code, dr, err := vr.ReadCodeWithScope() + if err != nil { + return cws, err + } + + scope := reflect.New(tD).Elem() + elems, err := dvd.decodeElemsFromDocumentReader(dc, dr) + if err != nil { + return cws, err + } + + scope.Set(reflect.MakeSlice(tD, 0, len(elems))) + scope.Set(reflect.Append(scope, elems...)) + + cws = primitive.CodeWithScope{ + Code: primitive.JavaScript(code), + Scope: scope.Interface().(primitive.D), + } + return cws, nil +} + +func (dvd DefaultValueDecoders) codeWithScopeDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tCodeWithScope { + return emptyValue, ValueDecoderError{ + Name: "CodeWithScopeDecodeValue", + Types: []reflect.Type{tCodeWithScope}, + Received: reflect.Zero(t), + } + } + + var cws primitive.CodeWithScope + var err error + switch vrType := vr.Type(); vrType { + case bsontype.CodeWithScope: + cws, err = dvd.readCodeWithScope(dc, vr) + case bsontype.Null: + err = vr.ReadNull() + case bsontype.Undefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a primitive.CodeWithScope", vrType) + } + if err != nil { + return emptyValue, err + } + + return 
reflect.ValueOf(cws), nil +} + +// CodeWithScopeDecodeValue is the ValueDecoderFunc for CodeWithScope. +func (dvd DefaultValueDecoders) CodeWithScopeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCodeWithScope { + return ValueDecoderError{Name: "CodeWithScopeDecodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val} + } + + elem, err := dvd.codeWithScopeDecodeType(dc, vr, tCodeWithScope) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func (dvd DefaultValueDecoders) decodeD(dc DecodeContext, vr bsonrw.ValueReader, _ reflect.Value) ([]reflect.Value, error) { + switch vr.Type() { + case bsontype.Type(0), bsontype.EmbeddedDocument: + default: + return nil, fmt.Errorf("cannot decode %v into a D", vr.Type()) + } + + dr, err := vr.ReadDocument() + if err != nil { + return nil, err + } + + return dvd.decodeElemsFromDocumentReader(dc, dr) +} + +func (DefaultValueDecoders) decodeElemsFromDocumentReader(dc DecodeContext, dr bsonrw.DocumentReader) ([]reflect.Value, error) { + decoder, err := dc.LookupDecoder(tEmpty) + if err != nil { + return nil, err + } + + elems := make([]reflect.Value, 0) + for { + key, vr, err := dr.ReadElement() + if err == bsonrw.ErrEOD { + break + } + if err != nil { + return nil, err + } + + val := reflect.New(tEmpty).Elem() + err = decoder.DecodeValue(dc, vr, val) + if err != nil { + return nil, newDecodeError(key, err) + } + + elems = append(elems, reflect.ValueOf(primitive.E{Key: key, Value: val.Interface()})) + } + + return elems, nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go new file mode 100644 index 0000000..6bdb43c --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go @@ -0,0 +1,766 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "reflect" + "sync" + "time" + + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" +) + +var defaultValueEncoders DefaultValueEncoders + +var bvwPool = bsonrw.NewBSONValueWriterPool() + +var errInvalidValue = errors.New("cannot encode invalid element") + +var sliceWriterPool = sync.Pool{ + New: func() interface{} { + sw := make(bsonrw.SliceWriter, 0) + return &sw + }, +} + +func encodeElement(ec EncodeContext, dw bsonrw.DocumentWriter, e primitive.E) error { + vw, err := dw.WriteDocumentElement(e.Key) + if err != nil { + return err + } + + if e.Value == nil { + return vw.WriteNull() + } + encoder, err := ec.LookupEncoder(reflect.TypeOf(e.Value)) + if err != nil { + return err + } + + err = encoder.EncodeValue(ec, vw, reflect.ValueOf(e.Value)) + if err != nil { + return err + } + return nil +} + +// DefaultValueEncoders is a namespace type for the default ValueEncoders used +// when creating a registry. +type DefaultValueEncoders struct{} + +// RegisterDefaultEncoders will register the encoder methods attached to DefaultValueEncoders with +// the provided RegistryBuilder. 
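+//
+// Illustrative sketch, not part of the upstream source: together with
+// RegisterDefaultDecoders this is roughly how the driver's bson package assembles its
+// default registry (the real one also registers the primitive codecs):
+//
+//	rb := NewRegistryBuilder()
+//	DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
+//	DefaultValueDecoders{}.RegisterDefaultDecoders(rb)
+//	reg := rb.Build()
+//	// reg can be passed to bson.MarshalWithRegistry / bson.UnmarshalWithRegistry.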
+func (dve DefaultValueEncoders) RegisterDefaultEncoders(rb *RegistryBuilder) { + if rb == nil { + panic(errors.New("argument to RegisterDefaultEncoders must not be nil")) + } + rb. + RegisterTypeEncoder(tByteSlice, defaultByteSliceCodec). + RegisterTypeEncoder(tTime, defaultTimeCodec). + RegisterTypeEncoder(tEmpty, defaultEmptyInterfaceCodec). + RegisterTypeEncoder(tCoreArray, defaultArrayCodec). + RegisterTypeEncoder(tOID, ValueEncoderFunc(dve.ObjectIDEncodeValue)). + RegisterTypeEncoder(tDecimal, ValueEncoderFunc(dve.Decimal128EncodeValue)). + RegisterTypeEncoder(tJSONNumber, ValueEncoderFunc(dve.JSONNumberEncodeValue)). + RegisterTypeEncoder(tURL, ValueEncoderFunc(dve.URLEncodeValue)). + RegisterTypeEncoder(tJavaScript, ValueEncoderFunc(dve.JavaScriptEncodeValue)). + RegisterTypeEncoder(tSymbol, ValueEncoderFunc(dve.SymbolEncodeValue)). + RegisterTypeEncoder(tBinary, ValueEncoderFunc(dve.BinaryEncodeValue)). + RegisterTypeEncoder(tUndefined, ValueEncoderFunc(dve.UndefinedEncodeValue)). + RegisterTypeEncoder(tDateTime, ValueEncoderFunc(dve.DateTimeEncodeValue)). + RegisterTypeEncoder(tNull, ValueEncoderFunc(dve.NullEncodeValue)). + RegisterTypeEncoder(tRegex, ValueEncoderFunc(dve.RegexEncodeValue)). + RegisterTypeEncoder(tDBPointer, ValueEncoderFunc(dve.DBPointerEncodeValue)). + RegisterTypeEncoder(tTimestamp, ValueEncoderFunc(dve.TimestampEncodeValue)). + RegisterTypeEncoder(tMinKey, ValueEncoderFunc(dve.MinKeyEncodeValue)). + RegisterTypeEncoder(tMaxKey, ValueEncoderFunc(dve.MaxKeyEncodeValue)). + RegisterTypeEncoder(tCoreDocument, ValueEncoderFunc(dve.CoreDocumentEncodeValue)). + RegisterTypeEncoder(tCodeWithScope, ValueEncoderFunc(dve.CodeWithScopeEncodeValue)). + RegisterDefaultEncoder(reflect.Bool, ValueEncoderFunc(dve.BooleanEncodeValue)). + RegisterDefaultEncoder(reflect.Int, ValueEncoderFunc(dve.IntEncodeValue)). + RegisterDefaultEncoder(reflect.Int8, ValueEncoderFunc(dve.IntEncodeValue)). + RegisterDefaultEncoder(reflect.Int16, ValueEncoderFunc(dve.IntEncodeValue)). + RegisterDefaultEncoder(reflect.Int32, ValueEncoderFunc(dve.IntEncodeValue)). + RegisterDefaultEncoder(reflect.Int64, ValueEncoderFunc(dve.IntEncodeValue)). + RegisterDefaultEncoder(reflect.Uint, defaultUIntCodec). + RegisterDefaultEncoder(reflect.Uint8, defaultUIntCodec). + RegisterDefaultEncoder(reflect.Uint16, defaultUIntCodec). + RegisterDefaultEncoder(reflect.Uint32, defaultUIntCodec). + RegisterDefaultEncoder(reflect.Uint64, defaultUIntCodec). + RegisterDefaultEncoder(reflect.Float32, ValueEncoderFunc(dve.FloatEncodeValue)). + RegisterDefaultEncoder(reflect.Float64, ValueEncoderFunc(dve.FloatEncodeValue)). + RegisterDefaultEncoder(reflect.Array, ValueEncoderFunc(dve.ArrayEncodeValue)). + RegisterDefaultEncoder(reflect.Map, defaultMapCodec). + RegisterDefaultEncoder(reflect.Slice, defaultSliceCodec). + RegisterDefaultEncoder(reflect.String, defaultStringCodec). + RegisterDefaultEncoder(reflect.Struct, newDefaultStructCodec()). + RegisterDefaultEncoder(reflect.Ptr, NewPointerCodec()). + RegisterHookEncoder(tValueMarshaler, ValueEncoderFunc(dve.ValueMarshalerEncodeValue)). + RegisterHookEncoder(tMarshaler, ValueEncoderFunc(dve.MarshalerEncodeValue)). + RegisterHookEncoder(tProxy, ValueEncoderFunc(dve.ProxyEncodeValue)) +} + +// BooleanEncodeValue is the ValueEncoderFunc for bool types. 
+func (dve DefaultValueEncoders) BooleanEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Bool { + return ValueEncoderError{Name: "BooleanEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val} + } + return vw.WriteBoolean(val.Bool()) +} + +func fitsIn32Bits(i int64) bool { + return math.MinInt32 <= i && i <= math.MaxInt32 +} + +// IntEncodeValue is the ValueEncoderFunc for int types. +func (dve DefaultValueEncoders) IntEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Int8, reflect.Int16, reflect.Int32: + return vw.WriteInt32(int32(val.Int())) + case reflect.Int: + i64 := val.Int() + if fitsIn32Bits(i64) { + return vw.WriteInt32(int32(i64)) + } + return vw.WriteInt64(i64) + case reflect.Int64: + i64 := val.Int() + if ec.MinSize && fitsIn32Bits(i64) { + return vw.WriteInt32(int32(i64)) + } + return vw.WriteInt64(i64) + } + + return ValueEncoderError{ + Name: "IntEncodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: val, + } +} + +// UintEncodeValue is the ValueEncoderFunc for uint types. +// +// Deprecated: UintEncodeValue is not registered by default. Use UintCodec.EncodeValue instead. +func (dve DefaultValueEncoders) UintEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Uint8, reflect.Uint16: + return vw.WriteInt32(int32(val.Uint())) + case reflect.Uint, reflect.Uint32, reflect.Uint64: + u64 := val.Uint() + if ec.MinSize && u64 <= math.MaxInt32 { + return vw.WriteInt32(int32(u64)) + } + if u64 > math.MaxInt64 { + return fmt.Errorf("%d overflows int64", u64) + } + return vw.WriteInt64(int64(u64)) + } + + return ValueEncoderError{ + Name: "UintEncodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: val, + } +} + +// FloatEncodeValue is the ValueEncoderFunc for float types. +func (dve DefaultValueEncoders) FloatEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Float32, reflect.Float64: + return vw.WriteDouble(val.Float()) + } + + return ValueEncoderError{Name: "FloatEncodeValue", Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, Received: val} +} + +// StringEncodeValue is the ValueEncoderFunc for string types. +// +// Deprecated: StringEncodeValue is not registered by default. Use StringCodec.EncodeValue instead. +func (dve DefaultValueEncoders) StringEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if val.Kind() != reflect.String { + return ValueEncoderError{ + Name: "StringEncodeValue", + Kinds: []reflect.Kind{reflect.String}, + Received: val, + } + } + + return vw.WriteString(val.String()) +} + +// ObjectIDEncodeValue is the ValueEncoderFunc for primitive.ObjectID. +func (dve DefaultValueEncoders) ObjectIDEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tOID { + return ValueEncoderError{Name: "ObjectIDEncodeValue", Types: []reflect.Type{tOID}, Received: val} + } + return vw.WriteObjectID(val.Interface().(primitive.ObjectID)) +} + +// Decimal128EncodeValue is the ValueEncoderFunc for primitive.Decimal128. 
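+//
+// Illustrative sketch, not part of the upstream source: with the default registry a
+// primitive.Decimal128 value is written as a BSON decimal128 element rather than a
+// double. The field name is hypothetical:
+//
+//	price, _ := primitive.ParseDecimal128("19.99")
+//	doc, _ := bson.Marshal(bson.M{"price": price})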
+func (dve DefaultValueEncoders) Decimal128EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDecimal { + return ValueEncoderError{Name: "Decimal128EncodeValue", Types: []reflect.Type{tDecimal}, Received: val} + } + return vw.WriteDecimal128(val.Interface().(primitive.Decimal128)) +} + +// JSONNumberEncodeValue is the ValueEncoderFunc for json.Number. +func (dve DefaultValueEncoders) JSONNumberEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tJSONNumber { + return ValueEncoderError{Name: "JSONNumberEncodeValue", Types: []reflect.Type{tJSONNumber}, Received: val} + } + jsnum := val.Interface().(json.Number) + + // Attempt int first, then float64 + if i64, err := jsnum.Int64(); err == nil { + return dve.IntEncodeValue(ec, vw, reflect.ValueOf(i64)) + } + + f64, err := jsnum.Float64() + if err != nil { + return err + } + + return dve.FloatEncodeValue(ec, vw, reflect.ValueOf(f64)) +} + +// URLEncodeValue is the ValueEncoderFunc for url.URL. +func (dve DefaultValueEncoders) URLEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tURL { + return ValueEncoderError{Name: "URLEncodeValue", Types: []reflect.Type{tURL}, Received: val} + } + u := val.Interface().(url.URL) + return vw.WriteString(u.String()) +} + +// TimeEncodeValue is the ValueEncoderFunc for time.Time. +// +// Deprecated: TimeEncodeValue is not registered by default. Use TimeCodec.EncodeValue instead. +func (dve DefaultValueEncoders) TimeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tTime { + return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val} + } + tt := val.Interface().(time.Time) + dt := primitive.NewDateTimeFromTime(tt) + return vw.WriteDateTime(int64(dt)) +} + +// ByteSliceEncodeValue is the ValueEncoderFunc for []byte. +// +// Deprecated: ByteSliceEncodeValue is not registered by default. Use ByteSliceCodec.EncodeValue instead. +func (dve DefaultValueEncoders) ByteSliceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tByteSlice { + return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + if val.IsNil() { + return vw.WriteNull() + } + return vw.WriteBinary(val.Interface().([]byte)) +} + +// MapEncodeValue is the ValueEncoderFunc for map[string]* types. +// +// Deprecated: MapEncodeValue is not registered by default. Use MapCodec.EncodeValue instead. +func (dve DefaultValueEncoders) MapEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String { + return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val} + } + + if val.IsNil() { + // If we have a nil map but we can't WriteNull, that means we're probably trying to encode + // to a TopLevel document. We can't currently tell if this is what actually happened, but if + // there's a deeper underlying problem, the error will also be returned from WriteDocument, + // so just continue. The operations on a map reflection value are valid, so we can call + // MapKeys within mapEncodeValue without a problem. 
+ err := vw.WriteNull() + if err == nil { + return nil + } + } + + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + return dve.mapEncodeValue(ec, dw, val, nil) +} + +// mapEncodeValue handles encoding of the values of a map. The collisionFn returns +// true if the provided key exists, this is mainly used for inline maps in the +// struct codec. +func (dve DefaultValueEncoders) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error { + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + keys := val.MapKeys() + for _, key := range keys { + if collisionFn != nil && collisionFn(key.String()) { + return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key) + } + + currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.MapIndex(key)) + if lookupErr != nil && lookupErr != errInvalidValue { + return lookupErr + } + + vw, err := dw.WriteDocumentElement(key.String()) + if err != nil { + return err + } + + if lookupErr == errInvalidValue { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() +} + +// ArrayEncodeValue is the ValueEncoderFunc for array types. +func (dve DefaultValueEncoders) ArrayEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Array { + return ValueEncoderError{Name: "ArrayEncodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val} + } + + // If we have a []primitive.E we want to treat it as a document instead of as an array. + if val.Type().Elem() == tE { + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + e := val.Index(idx).Interface().(primitive.E) + err = encodeElement(ec, dw, e) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() + } + + // If we have a []byte we want to treat it as a binary instead of as an array. + if val.Type().Elem() == tByte { + var byteSlice []byte + for idx := 0; idx < val.Len(); idx++ { + byteSlice = append(byteSlice, val.Index(idx).Interface().(byte)) + } + return vw.WriteBinary(byteSlice) + } + + aw, err := vw.WriteArray() + if err != nil { + return err + } + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx)) + if lookupErr != nil && lookupErr != errInvalidValue { + return lookupErr + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + if lookupErr == errInvalidValue { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + return aw.WriteArrayEnd() +} + +// SliceEncodeValue is the ValueEncoderFunc for slice types. +// +// Deprecated: SliceEncodeValue is not registered by default. Use SliceCodec.EncodeValue instead. 
+func (dve DefaultValueEncoders) SliceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Slice { + return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + if val.IsNil() { + return vw.WriteNull() + } + + // If we have a []primitive.E we want to treat it as a document instead of as an array. + if val.Type().ConvertibleTo(tD) { + d := val.Convert(tD).Interface().(primitive.D) + + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + for _, e := range d { + err = encodeElement(ec, dw, e) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() + } + + aw, err := vw.WriteArray() + if err != nil { + return err + } + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx)) + if lookupErr != nil && lookupErr != errInvalidValue { + return lookupErr + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + if lookupErr == errInvalidValue { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + return aw.WriteArrayEnd() +} + +func (dve DefaultValueEncoders) lookupElementEncoder(ec EncodeContext, origEncoder ValueEncoder, currVal reflect.Value) (ValueEncoder, reflect.Value, error) { + if origEncoder != nil || (currVal.Kind() != reflect.Interface) { + return origEncoder, currVal, nil + } + currVal = currVal.Elem() + if !currVal.IsValid() { + return nil, currVal, errInvalidValue + } + currEncoder, err := ec.LookupEncoder(currVal.Type()) + + return currEncoder, currVal, err +} + +// EmptyInterfaceEncodeValue is the ValueEncoderFunc for interface{}. +// +// Deprecated: EmptyInterfaceEncodeValue is not registered by default. Use EmptyInterfaceCodec.EncodeValue instead. +func (dve DefaultValueEncoders) EmptyInterfaceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tEmpty { + return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + if val.IsNil() { + return vw.WriteNull() + } + encoder, err := ec.LookupEncoder(val.Elem().Type()) + if err != nil { + return err + } + + return encoder.EncodeValue(ec, vw, val.Elem()) +} + +// ValueMarshalerEncodeValue is the ValueEncoderFunc for ValueMarshaler implementations. 
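Both slice encoders vendored in this file (the deprecated SliceEncodeValue above and SliceCodec further down) follow the same rule: a value convertible to primitive.D is written as a BSON document, any other slice as a BSON array. The following is a minimal illustrative sketch of that behaviour seen through the driver's public bson.Marshal helper, which is assumed from the non-vendored bson package rather than this file.

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

func main() {
	// bson.D is an alias for primitive.D, so it is convertible to tD and is
	// encoded as an ordered document rather than as an array.
	doc, err := bson.Marshal(bson.D{{Key: "a", Value: 1}, {Key: "b", Value: 2}})
	if err != nil {
		panic(err)
	}
	fmt.Println(bson.Raw(doc)) // an ordered document with keys "a" then "b"

	// Any other slice, such as []string, is encoded as a BSON array.
	arr, err := bson.Marshal(bson.M{"tags": []string{"x", "y"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(bson.Raw(arr)) // a document whose "tags" field is an array
}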
+func (dve DefaultValueEncoders) ValueMarshalerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + // Either val or a pointer to val must implement ValueMarshaler + switch { + case !val.IsValid(): + return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val} + case val.Type().Implements(tValueMarshaler): + // If ValueMarshaler is implemented on a concrete type, make sure that val isn't a nil pointer + if isImplementationNil(val, tValueMarshaler) { + return vw.WriteNull() + } + case reflect.PtrTo(val.Type()).Implements(tValueMarshaler) && val.CanAddr(): + val = val.Addr() + default: + return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val} + } + + fn := val.Convert(tValueMarshaler).MethodByName("MarshalBSONValue") + returns := fn.Call(nil) + if !returns[2].IsNil() { + return returns[2].Interface().(error) + } + t, data := returns[0].Interface().(bsontype.Type), returns[1].Interface().([]byte) + return bsonrw.Copier{}.CopyValueFromBytes(vw, t, data) +} + +// MarshalerEncodeValue is the ValueEncoderFunc for Marshaler implementations. +func (dve DefaultValueEncoders) MarshalerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + // Either val or a pointer to val must implement Marshaler + switch { + case !val.IsValid(): + return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val} + case val.Type().Implements(tMarshaler): + // If Marshaler is implemented on a concrete type, make sure that val isn't a nil pointer + if isImplementationNil(val, tMarshaler) { + return vw.WriteNull() + } + case reflect.PtrTo(val.Type()).Implements(tMarshaler) && val.CanAddr(): + val = val.Addr() + default: + return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val} + } + + fn := val.Convert(tMarshaler).MethodByName("MarshalBSON") + returns := fn.Call(nil) + if !returns[1].IsNil() { + return returns[1].Interface().(error) + } + data := returns[0].Interface().([]byte) + return bsonrw.Copier{}.CopyValueFromBytes(vw, bsontype.EmbeddedDocument, data) +} + +// ProxyEncodeValue is the ValueEncoderFunc for Proxy implementations. +func (dve DefaultValueEncoders) ProxyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + // Either val or a pointer to val must implement Proxy + switch { + case !val.IsValid(): + return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val} + case val.Type().Implements(tProxy): + // If Proxy is implemented on a concrete type, make sure that val isn't a nil pointer + if isImplementationNil(val, tProxy) { + return vw.WriteNull() + } + case reflect.PtrTo(val.Type()).Implements(tProxy) && val.CanAddr(): + val = val.Addr() + default: + return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val} + } + + fn := val.Convert(tProxy).MethodByName("ProxyBSON") + returns := fn.Call(nil) + if !returns[1].IsNil() { + return returns[1].Interface().(error) + } + data := returns[0] + var encoder ValueEncoder + var err error + if data.Elem().IsValid() { + encoder, err = ec.LookupEncoder(data.Elem().Type()) + } else { + encoder, err = ec.LookupEncoder(nil) + } + if err != nil { + return err + } + return encoder.EncodeValue(ec, vw, data.Elem()) +} + +// JavaScriptEncodeValue is the ValueEncoderFunc for the primitive.JavaScript type. 
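The three hook encoders above work purely through reflection: they locate MarshalBSONValue, MarshalBSON, or ProxyBSON on the value (or its address) and copy the result into the ValueWriter. Below is a small sketch of a type the ValueMarshaler hook would pick up; it assumes the driver's public bson.MarshalValue helper to build the (type, bytes) pair and is illustrative only.

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsontype"
)

// Celsius encodes itself as a BSON string such as "21.5°C".
type Celsius float64

// MarshalBSONValue satisfies the ValueMarshaler hook, so the default registry
// uses it instead of the numeric encoder for this type.
func (c Celsius) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(fmt.Sprintf("%.1f°C", float64(c)))
}

func main() {
	out, err := bson.Marshal(bson.M{"temp": Celsius(21.5)})
	if err != nil {
		panic(err)
	}
	fmt.Println(bson.Raw(out)) // the "temp" field is the string produced above
}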
+func (DefaultValueEncoders) JavaScriptEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tJavaScript { + return ValueEncoderError{Name: "JavaScriptEncodeValue", Types: []reflect.Type{tJavaScript}, Received: val} + } + + return vw.WriteJavascript(val.String()) +} + +// SymbolEncodeValue is the ValueEncoderFunc for the primitive.Symbol type. +func (DefaultValueEncoders) SymbolEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tSymbol { + return ValueEncoderError{Name: "SymbolEncodeValue", Types: []reflect.Type{tSymbol}, Received: val} + } + + return vw.WriteSymbol(val.String()) +} + +// BinaryEncodeValue is the ValueEncoderFunc for Binary. +func (DefaultValueEncoders) BinaryEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tBinary { + return ValueEncoderError{Name: "BinaryEncodeValue", Types: []reflect.Type{tBinary}, Received: val} + } + b := val.Interface().(primitive.Binary) + + return vw.WriteBinaryWithSubtype(b.Data, b.Subtype) +} + +// UndefinedEncodeValue is the ValueEncoderFunc for Undefined. +func (DefaultValueEncoders) UndefinedEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tUndefined { + return ValueEncoderError{Name: "UndefinedEncodeValue", Types: []reflect.Type{tUndefined}, Received: val} + } + + return vw.WriteUndefined() +} + +// DateTimeEncodeValue is the ValueEncoderFunc for DateTime. +func (DefaultValueEncoders) DateTimeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDateTime { + return ValueEncoderError{Name: "DateTimeEncodeValue", Types: []reflect.Type{tDateTime}, Received: val} + } + + return vw.WriteDateTime(val.Int()) +} + +// NullEncodeValue is the ValueEncoderFunc for Null. +func (DefaultValueEncoders) NullEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tNull { + return ValueEncoderError{Name: "NullEncodeValue", Types: []reflect.Type{tNull}, Received: val} + } + + return vw.WriteNull() +} + +// RegexEncodeValue is the ValueEncoderFunc for Regex. +func (DefaultValueEncoders) RegexEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tRegex { + return ValueEncoderError{Name: "RegexEncodeValue", Types: []reflect.Type{tRegex}, Received: val} + } + + regex := val.Interface().(primitive.Regex) + + return vw.WriteRegex(regex.Pattern, regex.Options) +} + +// DBPointerEncodeValue is the ValueEncoderFunc for DBPointer. +func (DefaultValueEncoders) DBPointerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDBPointer { + return ValueEncoderError{Name: "DBPointerEncodeValue", Types: []reflect.Type{tDBPointer}, Received: val} + } + + dbp := val.Interface().(primitive.DBPointer) + + return vw.WriteDBPointer(dbp.DB, dbp.Pointer) +} + +// TimestampEncodeValue is the ValueEncoderFunc for Timestamp. 
+func (DefaultValueEncoders) TimestampEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tTimestamp { + return ValueEncoderError{Name: "TimestampEncodeValue", Types: []reflect.Type{tTimestamp}, Received: val} + } + + ts := val.Interface().(primitive.Timestamp) + + return vw.WriteTimestamp(ts.T, ts.I) +} + +// MinKeyEncodeValue is the ValueEncoderFunc for MinKey. +func (DefaultValueEncoders) MinKeyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tMinKey { + return ValueEncoderError{Name: "MinKeyEncodeValue", Types: []reflect.Type{tMinKey}, Received: val} + } + + return vw.WriteMinKey() +} + +// MaxKeyEncodeValue is the ValueEncoderFunc for MaxKey. +func (DefaultValueEncoders) MaxKeyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tMaxKey { + return ValueEncoderError{Name: "MaxKeyEncodeValue", Types: []reflect.Type{tMaxKey}, Received: val} + } + + return vw.WriteMaxKey() +} + +// CoreDocumentEncodeValue is the ValueEncoderFunc for bsoncore.Document. +func (DefaultValueEncoders) CoreDocumentEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCoreDocument { + return ValueEncoderError{Name: "CoreDocumentEncodeValue", Types: []reflect.Type{tCoreDocument}, Received: val} + } + + cdoc := val.Interface().(bsoncore.Document) + + return bsonrw.Copier{}.CopyDocumentFromBytes(vw, cdoc) +} + +// CodeWithScopeEncodeValue is the ValueEncoderFunc for CodeWithScope. +func (dve DefaultValueEncoders) CodeWithScopeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCodeWithScope { + return ValueEncoderError{Name: "CodeWithScopeEncodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val} + } + + cws := val.Interface().(primitive.CodeWithScope) + + dw, err := vw.WriteCodeWithScope(string(cws.Code)) + if err != nil { + return err + } + + sw := sliceWriterPool.Get().(*bsonrw.SliceWriter) + defer sliceWriterPool.Put(sw) + *sw = (*sw)[:0] + + scopeVW := bvwPool.Get(sw) + defer bvwPool.Put(scopeVW) + + encoder, err := ec.LookupEncoder(reflect.TypeOf(cws.Scope)) + if err != nil { + return err + } + + err = encoder.EncodeValue(ec, scopeVW, reflect.ValueOf(cws.Scope)) + if err != nil { + return err + } + + err = bsonrw.Copier{}.CopyBytesToDocumentWriter(dw, *sw) + if err != nil { + return err + } + return dw.WriteDocumentEnd() +} + +// isImplementationNil returns if val is a nil pointer and inter is implemented on a concrete type +func isImplementationNil(val reflect.Value, inter reflect.Type) bool { + vt := val.Type() + for vt.Kind() == reflect.Ptr { + vt = vt.Elem() + } + return vt.Implements(inter) && val.Kind() == reflect.Ptr && val.IsNil() +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go new file mode 100644 index 0000000..5f903eb --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go @@ -0,0 +1,90 @@ +// Copyright (C) MongoDB, Inc. 2022-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package bsoncodec provides a system for encoding values to BSON representations and decoding +// values from BSON representations. This package considers both binary BSON and ExtendedJSON as +// BSON representations. The types in this package enable a flexible system for handling this +// encoding and decoding. +// +// The codec system is composed of two parts: +// +// 1) ValueEncoders and ValueDecoders that handle encoding and decoding Go values to and from BSON +// representations. +// +// 2) A Registry that holds these ValueEncoders and ValueDecoders and provides methods for +// retrieving them. +// +// # ValueEncoders and ValueDecoders +// +// The ValueEncoder interface is implemented by types that can encode a provided Go type to BSON. +// The value to encode is provided as a reflect.Value and a bsonrw.ValueWriter is used within the +// EncodeValue method to actually create the BSON representation. For convenience, ValueEncoderFunc +// is provided to allow use of a function with the correct signature as a ValueEncoder. An +// EncodeContext instance is provided to allow implementations to lookup further ValueEncoders and +// to provide configuration information. +// +// The ValueDecoder interface is the inverse of the ValueEncoder. Implementations should ensure that +// the value they receive is settable. Similar to ValueEncoderFunc, ValueDecoderFunc is provided to +// allow the use of a function with the correct signature as a ValueDecoder. A DecodeContext +// instance is provided and serves similar functionality to the EncodeContext. +// +// # Registry and RegistryBuilder +// +// A Registry is an immutable store for ValueEncoders, ValueDecoders, and a type map. See the Registry type +// documentation for examples of registering various custom encoders and decoders. A Registry can be constructed using a +// RegistryBuilder, which handles three main types of codecs: +// +// 1. Type encoders/decoders - These can be registered using the RegisterTypeEncoder and RegisterTypeDecoder methods. +// The registered codec will be invoked when encoding/decoding a value whose type matches the registered type exactly. +// If the registered type is an interface, the codec will be invoked when encoding or decoding values whose type is the +// interface, but not for values with concrete types that implement the interface. +// +// 2. Hook encoders/decoders - These can be registered using the RegisterHookEncoder and RegisterHookDecoder methods. +// These methods only accept interface types and the registered codecs will be invoked when encoding or decoding values +// whose types implement the interface. An example of a hook defined by the driver is bson.Marshaler. The driver will +// call the MarshalBSON method for any value whose type implements bson.Marshaler, regardless of the value's concrete +// type. +// +// 3. Type map entries - This can be used to associate a BSON type with a Go type. These type associations are used when +// decoding into a bson.D/bson.M or a struct field of type interface{}. For example, by default, BSON int32 and int64 +// values decode as Go int32 and int64 instances, respectively, when decoding into a bson.D. The following code would +// change the behavior so these values decode as Go int instances instead: +// +// intType := reflect.TypeOf(int(0)) +// registryBuilder.RegisterTypeMapEntry(bsontype.Int32, intType).RegisterTypeMapEntry(bsontype.Int64, intType) +// +// 4. 
Kind encoder/decoders - These can be registered using the RegisterDefaultEncoder and RegisterDefaultDecoder +// methods. The registered codec will be invoked when encoding or decoding values whose reflect.Kind matches the +// registered reflect.Kind as long as the value's type doesn't match a registered type or hook encoder/decoder first. +// These methods should be used to change the behavior for all values for a specific kind. +// +// # Registry Lookup Procedure +// +// When looking up an encoder in a Registry, the precedence rules are as follows: +// +// 1. A type encoder registered for the exact type of the value. +// +// 2. A hook encoder registered for an interface that is implemented by the value or by a pointer to the value. If the +// value matches multiple hooks (e.g. the type implements bsoncodec.Marshaler and bsoncodec.ValueMarshaler), the first +// one registered will be selected. Note that registries constructed using bson.NewRegistryBuilder have driver-defined +// hooks registered for the bsoncodec.Marshaler, bsoncodec.ValueMarshaler, and bsoncodec.Proxy interfaces, so those +// will take precedence over any new hooks. +// +// 3. A kind encoder registered for the value's kind. +// +// If all of these lookups fail to find an encoder, an error of type ErrNoEncoder is returned. The same precedence +// rules apply for decoders, with the exception that an error of type ErrNoDecoder will be returned if no decoder is +// found. +// +// # DefaultValueEncoders and DefaultValueDecoders +// +// The DefaultValueEncoders and DefaultValueDecoders types provide a full set of ValueEncoders and +// ValueDecoders for handling a wide range of Go types, including all of the types within the +// primitive package. To make registering these codecs easier, a helper method on each type is +// provided. For the DefaultValueEncoders type the method is called RegisterDefaultEncoders and for +// the DefaultValueDecoders type the method is called RegisterDefaultDecoders, this method also +// handles registering type map entries for each BSON type. +package bsoncodec diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go new file mode 100644 index 0000000..eda417c --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go @@ -0,0 +1,147 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +// EmptyInterfaceCodec is the Codec used for interface{} values. +type EmptyInterfaceCodec struct { + DecodeBinaryAsSlice bool +} + +var ( + defaultEmptyInterfaceCodec = NewEmptyInterfaceCodec() + + _ ValueCodec = defaultEmptyInterfaceCodec + _ typeDecoder = defaultEmptyInterfaceCodec +) + +// NewEmptyInterfaceCodec returns a EmptyInterfaceCodec with options opts. +func NewEmptyInterfaceCodec(opts ...*bsonoptions.EmptyInterfaceCodecOptions) *EmptyInterfaceCodec { + interfaceOpt := bsonoptions.MergeEmptyInterfaceCodecOptions(opts...) 
+ + codec := EmptyInterfaceCodec{} + if interfaceOpt.DecodeBinaryAsSlice != nil { + codec.DecodeBinaryAsSlice = *interfaceOpt.DecodeBinaryAsSlice + } + return &codec +} + +// EncodeValue is the ValueEncoderFunc for interface{}. +func (eic EmptyInterfaceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tEmpty { + return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + if val.IsNil() { + return vw.WriteNull() + } + encoder, err := ec.LookupEncoder(val.Elem().Type()) + if err != nil { + return err + } + + return encoder.EncodeValue(ec, vw, val.Elem()) +} + +func (eic EmptyInterfaceCodec) getEmptyInterfaceDecodeType(dc DecodeContext, valueType bsontype.Type) (reflect.Type, error) { + isDocument := valueType == bsontype.Type(0) || valueType == bsontype.EmbeddedDocument + if isDocument { + if dc.defaultDocumentType != nil { + // If the bsontype is an embedded document and the DocumentType is set on the DecodeContext, then return + // that type. + return dc.defaultDocumentType, nil + } + if dc.Ancestor != nil { + // Using ancestor information rather than looking up the type map entry forces consistent decoding. + // If we're decoding into a bson.D, subdocuments should also be decoded as bson.D, even if a type map entry + // has been registered. + return dc.Ancestor, nil + } + } + + rtype, err := dc.LookupTypeMapEntry(valueType) + if err == nil { + return rtype, nil + } + + if isDocument { + // For documents, fallback to looking up a type map entry for bsontype.Type(0) or bsontype.EmbeddedDocument, + // depending on the original valueType. + var lookupType bsontype.Type + switch valueType { + case bsontype.Type(0): + lookupType = bsontype.EmbeddedDocument + case bsontype.EmbeddedDocument: + lookupType = bsontype.Type(0) + } + + rtype, err = dc.LookupTypeMapEntry(lookupType) + if err == nil { + return rtype, nil + } + } + + return nil, err +} + +func (eic EmptyInterfaceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tEmpty { + return emptyValue, ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.Zero(t)} + } + + rtype, err := eic.getEmptyInterfaceDecodeType(dc, vr.Type()) + if err != nil { + switch vr.Type() { + case bsontype.Null: + return reflect.Zero(t), vr.ReadNull() + default: + return emptyValue, err + } + } + + decoder, err := dc.LookupDecoder(rtype) + if err != nil { + return emptyValue, err + } + + elem, err := decodeTypeOrValue(decoder, dc, vr, rtype) + if err != nil { + return emptyValue, err + } + + if eic.DecodeBinaryAsSlice && rtype == tBinary { + binElem := elem.Interface().(primitive.Binary) + if binElem.Subtype == bsontype.BinaryGeneric || binElem.Subtype == bsontype.BinaryBinaryOld { + elem = reflect.ValueOf(binElem.Data) + } + } + + return elem, nil +} + +// DecodeValue is the ValueDecoderFunc for interface{}. 
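DecodeBinaryAsSlice above controls what a generic BSON binary becomes when the target is interface{}: a plain []byte instead of primitive.Binary. A hedged sketch of wiring that up follows; it assumes the vendored bsonoptions package together with the driver's public bson.NewRegistryBuilder and bson.UnmarshalWithRegistry helpers, none of which appear in this file.

package main

import (
	"fmt"
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonoptions"
)

func main() {
	// Build an interface{} codec that unwraps generic binary values.
	opts := bsonoptions.EmptyInterfaceCodec().SetDecodeBinaryAsSlice(true)
	codec := bsoncodec.NewEmptyInterfaceCodec(opts)

	// Register it as the type decoder for interface{} on top of the defaults.
	tEmpty := reflect.TypeOf((*interface{})(nil)).Elem()
	reg := bson.NewRegistryBuilder().
		RegisterTypeDecoder(tEmpty, codec).
		Build()

	raw, _ := bson.Marshal(bson.M{"payload": []byte{0x01, 0x02}})

	var out map[string]interface{}
	_ = bson.UnmarshalWithRegistry(reg, raw, &out)
	fmt.Printf("%T\n", out["payload"]) // []uint8 rather than primitive.Binary
}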
+func (eic EmptyInterfaceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tEmpty { + return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + elem, err := eic.decodeType(dc, vr, val.Type()) + if err != nil { + return err + } + + val.Set(elem) + return nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go new file mode 100644 index 0000000..e1fbef9 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go @@ -0,0 +1,309 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "encoding" + "fmt" + "reflect" + "strconv" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +var defaultMapCodec = NewMapCodec() + +// MapCodec is the Codec used for map values. +type MapCodec struct { + DecodeZerosMap bool + EncodeNilAsEmpty bool + EncodeKeysWithStringer bool +} + +var _ ValueCodec = &MapCodec{} + +// KeyMarshaler is the interface implemented by an object that can marshal itself into a string key. +// This applies to types used as map keys and is similar to encoding.TextMarshaler. +type KeyMarshaler interface { + MarshalKey() (key string, err error) +} + +// KeyUnmarshaler is the interface implemented by an object that can unmarshal a string representation +// of itself. This applies to types used as map keys and is similar to encoding.TextUnmarshaler. +// +// UnmarshalKey must be able to decode the form generated by MarshalKey. +// UnmarshalKey must copy the text if it wishes to retain the text +// after returning. +type KeyUnmarshaler interface { + UnmarshalKey(key string) error +} + +// NewMapCodec returns a MapCodec with options opts. +func NewMapCodec(opts ...*bsonoptions.MapCodecOptions) *MapCodec { + mapOpt := bsonoptions.MergeMapCodecOptions(opts...) + + codec := MapCodec{} + if mapOpt.DecodeZerosMap != nil { + codec.DecodeZerosMap = *mapOpt.DecodeZerosMap + } + if mapOpt.EncodeNilAsEmpty != nil { + codec.EncodeNilAsEmpty = *mapOpt.EncodeNilAsEmpty + } + if mapOpt.EncodeKeysWithStringer != nil { + codec.EncodeKeysWithStringer = *mapOpt.EncodeKeysWithStringer + } + return &codec +} + +// EncodeValue is the ValueEncoder for map[*]* types. +func (mc *MapCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Map { + return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val} + } + + if val.IsNil() && !mc.EncodeNilAsEmpty { + // If we have a nil map but we can't WriteNull, that means we're probably trying to encode + // to a TopLevel document. We can't currently tell if this is what actually happened, but if + // there's a deeper underlying problem, the error will also be returned from WriteDocument, + // so just continue. The operations on a map reflection value are valid, so we can call + // MapKeys within mapEncodeValue without a problem. 
+ err := vw.WriteNull() + if err == nil { + return nil + } + } + + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + return mc.mapEncodeValue(ec, dw, val, nil) +} + +// mapEncodeValue handles encoding of the values of a map. The collisionFn returns +// true if the provided key exists, this is mainly used for inline maps in the +// struct codec. +func (mc *MapCodec) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error { + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + keys := val.MapKeys() + for _, key := range keys { + keyStr, err := mc.encodeKey(key) + if err != nil { + return err + } + + if collisionFn != nil && collisionFn(keyStr) { + return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key) + } + + currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.MapIndex(key)) + if lookupErr != nil && lookupErr != errInvalidValue { + return lookupErr + } + + vw, err := dw.WriteDocumentElement(keyStr) + if err != nil { + return err + } + + if lookupErr == errInvalidValue { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() +} + +// DecodeValue is the ValueDecoder for map[string/decimal]* types. +func (mc *MapCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if val.Kind() != reflect.Map || (!val.CanSet() && val.IsNil()) { + return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case bsontype.Type(0), bsontype.EmbeddedDocument: + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + case bsontype.Undefined: + val.Set(reflect.Zero(val.Type())) + return vr.ReadUndefined() + default: + return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type()) + } + + dr, err := vr.ReadDocument() + if err != nil { + return err + } + + if val.IsNil() { + val.Set(reflect.MakeMap(val.Type())) + } + + if val.Len() > 0 && mc.DecodeZerosMap { + clearMap(val) + } + + eType := val.Type().Elem() + decoder, err := dc.LookupDecoder(eType) + if err != nil { + return err + } + eTypeDecoder, _ := decoder.(typeDecoder) + + if eType == tEmpty { + dc.Ancestor = val.Type() + } + + keyType := val.Type().Key() + + for { + key, vr, err := dr.ReadElement() + if err == bsonrw.ErrEOD { + break + } + if err != nil { + return err + } + + k, err := mc.decodeKey(key, keyType) + if err != nil { + return err + } + + elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true) + if err != nil { + return newDecodeError(key, err) + } + + val.SetMapIndex(k, elem) + } + return nil +} + +func clearMap(m reflect.Value) { + var none reflect.Value + for _, k := range m.MapKeys() { + m.SetMapIndex(k, none) + } +} + +func (mc *MapCodec) encodeKey(val reflect.Value) (string, error) { + if mc.EncodeKeysWithStringer { + return fmt.Sprint(val), nil + } + + // keys of any string type are used directly + if val.Kind() == reflect.String { + return val.String(), nil + } + // KeyMarshalers are marshaled + if km, ok := val.Interface().(KeyMarshaler); ok { + if val.Kind() == reflect.Ptr && val.IsNil() { + return "", nil + } + buf, err := km.MarshalKey() + if err == nil { + return buf, nil + } 
+ return "", err + } + // keys implement encoding.TextMarshaler are marshaled. + if km, ok := val.Interface().(encoding.TextMarshaler); ok { + if val.Kind() == reflect.Ptr && val.IsNil() { + return "", nil + } + + buf, err := km.MarshalText() + if err != nil { + return "", err + } + + return string(buf), nil + } + + switch val.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return strconv.FormatInt(val.Int(), 10), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return strconv.FormatUint(val.Uint(), 10), nil + } + return "", fmt.Errorf("unsupported key type: %v", val.Type()) +} + +var keyUnmarshalerType = reflect.TypeOf((*KeyUnmarshaler)(nil)).Elem() +var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() + +func (mc *MapCodec) decodeKey(key string, keyType reflect.Type) (reflect.Value, error) { + keyVal := reflect.ValueOf(key) + var err error + switch { + // First, if EncodeKeysWithStringer is not enabled, try to decode withKeyUnmarshaler + case !mc.EncodeKeysWithStringer && reflect.PtrTo(keyType).Implements(keyUnmarshalerType): + keyVal = reflect.New(keyType) + v := keyVal.Interface().(KeyUnmarshaler) + err = v.UnmarshalKey(key) + keyVal = keyVal.Elem() + // Try to decode encoding.TextUnmarshalers. + case reflect.PtrTo(keyType).Implements(textUnmarshalerType): + keyVal = reflect.New(keyType) + v := keyVal.Interface().(encoding.TextUnmarshaler) + err = v.UnmarshalText([]byte(key)) + keyVal = keyVal.Elem() + // Otherwise, go to type specific behavior + default: + switch keyType.Kind() { + case reflect.String: + keyVal = reflect.ValueOf(key).Convert(keyType) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + n, parseErr := strconv.ParseInt(key, 10, 64) + if parseErr != nil || reflect.Zero(keyType).OverflowInt(n) { + err = fmt.Errorf("failed to unmarshal number key %v", key) + } + keyVal = reflect.ValueOf(n).Convert(keyType) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + n, parseErr := strconv.ParseUint(key, 10, 64) + if parseErr != nil || reflect.Zero(keyType).OverflowUint(n) { + err = fmt.Errorf("failed to unmarshal number key %v", key) + break + } + keyVal = reflect.ValueOf(n).Convert(keyType) + case reflect.Float32, reflect.Float64: + if mc.EncodeKeysWithStringer { + parsed, err := strconv.ParseFloat(key, 64) + if err != nil { + return keyVal, fmt.Errorf("Map key is defined to be a decimal type (%v) but got error %v", keyType.Kind(), err) + } + keyVal = reflect.ValueOf(parsed) + break + } + fallthrough + default: + return keyVal, fmt.Errorf("unsupported key type: %v", keyType) + } + } + return keyVal, err +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go new file mode 100644 index 0000000..fbd9f0a --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go @@ -0,0 +1,65 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import "fmt" + +type mode int + +const ( + _ mode = iota + mTopLevel + mDocument + mArray + mValue + mElement + mCodeWithScope + mSpacer +) + +func (m mode) String() string { + var str string + + switch m { + case mTopLevel: + str = "TopLevel" + case mDocument: + str = "DocumentMode" + case mArray: + str = "ArrayMode" + case mValue: + str = "ValueMode" + case mElement: + str = "ElementMode" + case mCodeWithScope: + str = "CodeWithScopeMode" + case mSpacer: + str = "CodeWithScopeSpacerFrame" + default: + str = "UnknownMode" + } + + return str +} + +// TransitionError is an error returned when an invalid progressing a +// ValueReader or ValueWriter state machine occurs. +type TransitionError struct { + parent mode + current mode + destination mode +} + +func (te TransitionError) Error() string { + if te.destination == mode(0) { + return fmt.Sprintf("invalid state transition: cannot read/write value while in %s", te.current) + } + if te.parent == mode(0) { + return fmt.Sprintf("invalid state transition: %s -> %s", te.current, te.destination) + } + return fmt.Sprintf("invalid state transition: %s -> %s; parent %s", te.current, te.destination, te.parent) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go new file mode 100644 index 0000000..616a3e7 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go @@ -0,0 +1,109 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "reflect" + "sync" + + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +var _ ValueEncoder = &PointerCodec{} +var _ ValueDecoder = &PointerCodec{} + +// PointerCodec is the Codec used for pointers. +type PointerCodec struct { + ecache map[reflect.Type]ValueEncoder + dcache map[reflect.Type]ValueDecoder + l sync.RWMutex +} + +// NewPointerCodec returns a PointerCodec that has been initialized. +func NewPointerCodec() *PointerCodec { + return &PointerCodec{ + ecache: make(map[reflect.Type]ValueEncoder), + dcache: make(map[reflect.Type]ValueDecoder), + } +} + +// EncodeValue handles encoding a pointer by either encoding it to BSON Null if the pointer is nil +// or looking up an encoder for the type of value the pointer points to. +func (pc *PointerCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if val.Kind() != reflect.Ptr { + if !val.IsValid() { + return vw.WriteNull() + } + return ValueEncoderError{Name: "PointerCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val} + } + + if val.IsNil() { + return vw.WriteNull() + } + + pc.l.RLock() + enc, ok := pc.ecache[val.Type()] + pc.l.RUnlock() + if ok { + if enc == nil { + return ErrNoEncoder{Type: val.Type()} + } + return enc.EncodeValue(ec, vw, val.Elem()) + } + + enc, err := ec.LookupEncoder(val.Type().Elem()) + pc.l.Lock() + pc.ecache[val.Type()] = enc + pc.l.Unlock() + if err != nil { + return err + } + + return enc.EncodeValue(ec, vw, val.Elem()) +} + +// DecodeValue handles decoding a pointer by looking up a decoder for the type it points to and +// using that to decode. 
If the BSON value is Null, this method will set the pointer to nil. +func (pc *PointerCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.Ptr { + return ValueDecoderError{Name: "PointerCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val} + } + + if vr.Type() == bsontype.Null { + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + } + if vr.Type() == bsontype.Undefined { + val.Set(reflect.Zero(val.Type())) + return vr.ReadUndefined() + } + + if val.IsNil() { + val.Set(reflect.New(val.Type().Elem())) + } + + pc.l.RLock() + dec, ok := pc.dcache[val.Type()] + pc.l.RUnlock() + if ok { + if dec == nil { + return ErrNoDecoder{Type: val.Type()} + } + return dec.DecodeValue(dc, vr, val.Elem()) + } + + dec, err := dc.LookupDecoder(val.Type().Elem()) + pc.l.Lock() + pc.dcache[val.Type()] = dec + pc.l.Unlock() + if err != nil { + return err + } + + return dec.DecodeValue(dc, vr, val.Elem()) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go new file mode 100644 index 0000000..4cf2b01 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go @@ -0,0 +1,14 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +// Proxy is an interface implemented by types that cannot themselves be directly encoded. Types +// that implement this interface with have ProxyBSON called during the encoding process and that +// value will be encoded in place for the implementer. +type Proxy interface { + ProxyBSON() (interface{}, error) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go new file mode 100644 index 0000000..8064402 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go @@ -0,0 +1,469 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "errors" + "fmt" + "reflect" + "sync" + + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +// ErrNilType is returned when nil is passed to either LookupEncoder or LookupDecoder. +var ErrNilType = errors.New("cannot perform a decoder lookup on ") + +// ErrNotPointer is returned when a non-pointer type is provided to LookupDecoder. +var ErrNotPointer = errors.New("non-pointer provided to LookupDecoder") + +// ErrNoEncoder is returned when there wasn't an encoder available for a type. +type ErrNoEncoder struct { + Type reflect.Type +} + +func (ene ErrNoEncoder) Error() string { + if ene.Type == nil { + return "no encoder found for " + } + return "no encoder found for " + ene.Type.String() +} + +// ErrNoDecoder is returned when there wasn't a decoder available for a type. +type ErrNoDecoder struct { + Type reflect.Type +} + +func (end ErrNoDecoder) Error() string { + return "no decoder found for " + end.Type.String() +} + +// ErrNoTypeMapEntry is returned when there wasn't a type available for the provided BSON type. 
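Proxy, defined just above, lets a type substitute another value for itself at encode time; as noted in doc.go earlier in this diff, registries built by bson.NewRegistryBuilder register a hook for it by default. A brief illustrative sketch:

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

// secret is never encoded directly; it proxies to a redacted document.
type secret struct {
	user, password string
}

// ProxyBSON returns the value that is encoded in place of secret.
func (s secret) ProxyBSON() (interface{}, error) {
	return bson.D{{Key: "user", Value: s.user}, {Key: "password", Value: "***"}}, nil
}

func main() {
	out, err := bson.Marshal(bson.M{"creds": secret{user: "alice", password: "hunter2"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(bson.Raw(out)) // the "creds" document carries the user and a redacted password
}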
+type ErrNoTypeMapEntry struct { + Type bsontype.Type +} + +func (entme ErrNoTypeMapEntry) Error() string { + return "no type map entry found for " + entme.Type.String() +} + +// ErrNotInterface is returned when the provided type is not an interface. +var ErrNotInterface = errors.New("The provided type is not an interface") + +// A RegistryBuilder is used to build a Registry. This type is not goroutine +// safe. +type RegistryBuilder struct { + typeEncoders map[reflect.Type]ValueEncoder + interfaceEncoders []interfaceValueEncoder + kindEncoders map[reflect.Kind]ValueEncoder + + typeDecoders map[reflect.Type]ValueDecoder + interfaceDecoders []interfaceValueDecoder + kindDecoders map[reflect.Kind]ValueDecoder + + typeMap map[bsontype.Type]reflect.Type +} + +// A Registry is used to store and retrieve codecs for types and interfaces. This type is the main +// typed passed around and Encoders and Decoders are constructed from it. +type Registry struct { + typeEncoders map[reflect.Type]ValueEncoder + typeDecoders map[reflect.Type]ValueDecoder + + interfaceEncoders []interfaceValueEncoder + interfaceDecoders []interfaceValueDecoder + + kindEncoders map[reflect.Kind]ValueEncoder + kindDecoders map[reflect.Kind]ValueDecoder + + typeMap map[bsontype.Type]reflect.Type + + mu sync.RWMutex +} + +// NewRegistryBuilder creates a new empty RegistryBuilder. +func NewRegistryBuilder() *RegistryBuilder { + return &RegistryBuilder{ + typeEncoders: make(map[reflect.Type]ValueEncoder), + typeDecoders: make(map[reflect.Type]ValueDecoder), + + interfaceEncoders: make([]interfaceValueEncoder, 0), + interfaceDecoders: make([]interfaceValueDecoder, 0), + + kindEncoders: make(map[reflect.Kind]ValueEncoder), + kindDecoders: make(map[reflect.Kind]ValueDecoder), + + typeMap: make(map[bsontype.Type]reflect.Type), + } +} + +func buildDefaultRegistry() *Registry { + rb := NewRegistryBuilder() + defaultValueEncoders.RegisterDefaultEncoders(rb) + defaultValueDecoders.RegisterDefaultDecoders(rb) + return rb.Build() +} + +// RegisterCodec will register the provided ValueCodec for the provided type. +func (rb *RegistryBuilder) RegisterCodec(t reflect.Type, codec ValueCodec) *RegistryBuilder { + rb.RegisterTypeEncoder(t, codec) + rb.RegisterTypeDecoder(t, codec) + return rb +} + +// RegisterTypeEncoder will register the provided ValueEncoder for the provided type. +// +// The type will be used directly, so an encoder can be registered for a type and a different encoder can be registered +// for a pointer to that type. +// +// If the given type is an interface, the encoder will be called when marshalling a type that is that interface. It +// will not be called when marshalling a non-interface type that implements the interface. +func (rb *RegistryBuilder) RegisterTypeEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder { + rb.typeEncoders[t] = enc + return rb +} + +// RegisterHookEncoder will register an encoder for the provided interface type t. This encoder will be called when +// marshalling a type if the type implements t or a pointer to the type implements t. If the provided type is not +// an interface (i.e. t.Kind() != reflect.Interface), this method will panic. 
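A minimal sketch of the RegistryBuilder in use, mirroring the int type-map example from doc.go above: with type map entries for Int32 and Int64, numeric values land in interface{} targets as Go int. bson.NewRegistryBuilder and bson.UnmarshalWithRegistry are assumed from the driver's public bson package rather than this file.

package main

import (
	"fmt"
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsontype"
)

func main() {
	intType := reflect.TypeOf(int(0))
	reg := bson.NewRegistryBuilder().
		RegisterTypeMapEntry(bsontype.Int32, intType).
		RegisterTypeMapEntry(bsontype.Int64, intType).
		Build()

	raw, _ := bson.Marshal(bson.M{"n": int32(7)})

	var out bson.D
	_ = bson.UnmarshalWithRegistry(reg, raw, &out)
	fmt.Printf("%T\n", out[0].Value) // int rather than the default int32
}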
+func (rb *RegistryBuilder) RegisterHookEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder { + if t.Kind() != reflect.Interface { + panicStr := fmt.Sprintf("RegisterHookEncoder expects a type with kind reflect.Interface, "+ + "got type %s with kind %s", t, t.Kind()) + panic(panicStr) + } + + for idx, encoder := range rb.interfaceEncoders { + if encoder.i == t { + rb.interfaceEncoders[idx].ve = enc + return rb + } + } + + rb.interfaceEncoders = append(rb.interfaceEncoders, interfaceValueEncoder{i: t, ve: enc}) + return rb +} + +// RegisterTypeDecoder will register the provided ValueDecoder for the provided type. +// +// The type will be used directly, so a decoder can be registered for a type and a different decoder can be registered +// for a pointer to that type. +// +// If the given type is an interface, the decoder will be called when unmarshalling into a type that is that interface. +// It will not be called when unmarshalling into a non-interface type that implements the interface. +func (rb *RegistryBuilder) RegisterTypeDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder { + rb.typeDecoders[t] = dec + return rb +} + +// RegisterHookDecoder will register an decoder for the provided interface type t. This decoder will be called when +// unmarshalling into a type if the type implements t or a pointer to the type implements t. If the provided type is not +// an interface (i.e. t.Kind() != reflect.Interface), this method will panic. +func (rb *RegistryBuilder) RegisterHookDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder { + if t.Kind() != reflect.Interface { + panicStr := fmt.Sprintf("RegisterHookDecoder expects a type with kind reflect.Interface, "+ + "got type %s with kind %s", t, t.Kind()) + panic(panicStr) + } + + for idx, decoder := range rb.interfaceDecoders { + if decoder.i == t { + rb.interfaceDecoders[idx].vd = dec + return rb + } + } + + rb.interfaceDecoders = append(rb.interfaceDecoders, interfaceValueDecoder{i: t, vd: dec}) + return rb +} + +// RegisterEncoder registers the provided type and encoder pair. +// +// Deprecated: Use RegisterTypeEncoder or RegisterHookEncoder instead. +func (rb *RegistryBuilder) RegisterEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder { + if t == tEmpty { + rb.typeEncoders[t] = enc + return rb + } + switch t.Kind() { + case reflect.Interface: + for idx, ir := range rb.interfaceEncoders { + if ir.i == t { + rb.interfaceEncoders[idx].ve = enc + return rb + } + } + + rb.interfaceEncoders = append(rb.interfaceEncoders, interfaceValueEncoder{i: t, ve: enc}) + default: + rb.typeEncoders[t] = enc + } + return rb +} + +// RegisterDecoder registers the provided type and decoder pair. +// +// Deprecated: Use RegisterTypeDecoder or RegisterHookDecoder instead. +func (rb *RegistryBuilder) RegisterDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder { + if t == nil { + rb.typeDecoders[nil] = dec + return rb + } + if t == tEmpty { + rb.typeDecoders[t] = dec + return rb + } + switch t.Kind() { + case reflect.Interface: + for idx, ir := range rb.interfaceDecoders { + if ir.i == t { + rb.interfaceDecoders[idx].vd = dec + return rb + } + } + + rb.interfaceDecoders = append(rb.interfaceDecoders, interfaceValueDecoder{i: t, vd: dec}) + default: + rb.typeDecoders[t] = dec + } + return rb +} + +// RegisterDefaultEncoder will registr the provided ValueEncoder to the provided +// kind. 
+func (rb *RegistryBuilder) RegisterDefaultEncoder(kind reflect.Kind, enc ValueEncoder) *RegistryBuilder { + rb.kindEncoders[kind] = enc + return rb +} + +// RegisterDefaultDecoder will register the provided ValueDecoder to the +// provided kind. +func (rb *RegistryBuilder) RegisterDefaultDecoder(kind reflect.Kind, dec ValueDecoder) *RegistryBuilder { + rb.kindDecoders[kind] = dec + return rb +} + +// RegisterTypeMapEntry will register the provided type to the BSON type. The primary usage for this +// mapping is decoding situations where an empty interface is used and a default type needs to be +// created and decoded into. +// +// By default, BSON documents will decode into interface{} values as bson.D. To change the default type for BSON +// documents, a type map entry for bsontype.EmbeddedDocument should be registered. For example, to force BSON documents +// to decode to bson.Raw, use the following code: +// +// rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(bson.Raw{})) +func (rb *RegistryBuilder) RegisterTypeMapEntry(bt bsontype.Type, rt reflect.Type) *RegistryBuilder { + rb.typeMap[bt] = rt + return rb +} + +// Build creates a Registry from the current state of this RegistryBuilder. +func (rb *RegistryBuilder) Build() *Registry { + registry := new(Registry) + + registry.typeEncoders = make(map[reflect.Type]ValueEncoder) + for t, enc := range rb.typeEncoders { + registry.typeEncoders[t] = enc + } + + registry.typeDecoders = make(map[reflect.Type]ValueDecoder) + for t, dec := range rb.typeDecoders { + registry.typeDecoders[t] = dec + } + + registry.interfaceEncoders = make([]interfaceValueEncoder, len(rb.interfaceEncoders)) + copy(registry.interfaceEncoders, rb.interfaceEncoders) + + registry.interfaceDecoders = make([]interfaceValueDecoder, len(rb.interfaceDecoders)) + copy(registry.interfaceDecoders, rb.interfaceDecoders) + + registry.kindEncoders = make(map[reflect.Kind]ValueEncoder) + for kind, enc := range rb.kindEncoders { + registry.kindEncoders[kind] = enc + } + + registry.kindDecoders = make(map[reflect.Kind]ValueDecoder) + for kind, dec := range rb.kindDecoders { + registry.kindDecoders[kind] = dec + } + + registry.typeMap = make(map[bsontype.Type]reflect.Type) + for bt, rt := range rb.typeMap { + registry.typeMap[bt] = rt + } + + return registry +} + +// LookupEncoder inspects the registry for an encoder for the given type. The lookup precedence works as follows: +// +// 1. An encoder registered for the exact type. If the given type represents an interface, an encoder registered using +// RegisterTypeEncoder for the interface will be selected. +// +// 2. An encoder registered using RegisterHookEncoder for an interface implemented by the type or by a pointer to the +// type. +// +// 3. An encoder registered for the reflect.Kind of the value. +// +// If no encoder is found, an error of type ErrNoEncoder is returned. 
+func (r *Registry) LookupEncoder(t reflect.Type) (ValueEncoder, error) { + encodererr := ErrNoEncoder{Type: t} + r.mu.RLock() + enc, found := r.lookupTypeEncoder(t) + r.mu.RUnlock() + if found { + if enc == nil { + return nil, ErrNoEncoder{Type: t} + } + return enc, nil + } + + enc, found = r.lookupInterfaceEncoder(t, true) + if found { + r.mu.Lock() + r.typeEncoders[t] = enc + r.mu.Unlock() + return enc, nil + } + + if t == nil { + r.mu.Lock() + r.typeEncoders[t] = nil + r.mu.Unlock() + return nil, encodererr + } + + enc, found = r.kindEncoders[t.Kind()] + if !found { + r.mu.Lock() + r.typeEncoders[t] = nil + r.mu.Unlock() + return nil, encodererr + } + + r.mu.Lock() + r.typeEncoders[t] = enc + r.mu.Unlock() + return enc, nil +} + +func (r *Registry) lookupTypeEncoder(t reflect.Type) (ValueEncoder, bool) { + enc, found := r.typeEncoders[t] + return enc, found +} + +func (r *Registry) lookupInterfaceEncoder(t reflect.Type, allowAddr bool) (ValueEncoder, bool) { + if t == nil { + return nil, false + } + for _, ienc := range r.interfaceEncoders { + if t.Implements(ienc.i) { + return ienc.ve, true + } + if allowAddr && t.Kind() != reflect.Ptr && reflect.PtrTo(t).Implements(ienc.i) { + // if *t implements an interface, this will catch if t implements an interface further ahead + // in interfaceEncoders + defaultEnc, found := r.lookupInterfaceEncoder(t, false) + if !found { + defaultEnc = r.kindEncoders[t.Kind()] + } + return newCondAddrEncoder(ienc.ve, defaultEnc), true + } + } + return nil, false +} + +// LookupDecoder inspects the registry for an decoder for the given type. The lookup precedence works as follows: +// +// 1. A decoder registered for the exact type. If the given type represents an interface, a decoder registered using +// RegisterTypeDecoder for the interface will be selected. +// +// 2. A decoder registered using RegisterHookDecoder for an interface implemented by the type or by a pointer to the +// type. +// +// 3. A decoder registered for the reflect.Kind of the value. +// +// If no decoder is found, an error of type ErrNoDecoder is returned. 
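Seen from the caller's side, the lookup path above resolves a registered type such as time.Time to its encoder, while a kind with no registered codec (a channel, say) surfaces ErrNoEncoder. A short sketch, assuming the public bson.NewRegistryBuilder helper:

package main

import (
	"errors"
	"fmt"
	"reflect"
	"time"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
)

func main() {
	reg := bson.NewRegistryBuilder().Build()

	// time.Time has a type encoder registered by default.
	if _, err := reg.LookupEncoder(reflect.TypeOf(time.Time{})); err == nil {
		fmt.Println("found the registered time.Time encoder")
	}

	// Channels match no type, hook, or kind encoder.
	_, err := reg.LookupEncoder(reflect.TypeOf(make(chan int)))
	var noEnc bsoncodec.ErrNoEncoder
	fmt.Println(errors.As(err, &noEnc)) // true
}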
+func (r *Registry) LookupDecoder(t reflect.Type) (ValueDecoder, error) { + if t == nil { + return nil, ErrNilType + } + decodererr := ErrNoDecoder{Type: t} + r.mu.RLock() + dec, found := r.lookupTypeDecoder(t) + r.mu.RUnlock() + if found { + if dec == nil { + return nil, ErrNoDecoder{Type: t} + } + return dec, nil + } + + dec, found = r.lookupInterfaceDecoder(t, true) + if found { + r.mu.Lock() + r.typeDecoders[t] = dec + r.mu.Unlock() + return dec, nil + } + + dec, found = r.kindDecoders[t.Kind()] + if !found { + r.mu.Lock() + r.typeDecoders[t] = nil + r.mu.Unlock() + return nil, decodererr + } + + r.mu.Lock() + r.typeDecoders[t] = dec + r.mu.Unlock() + return dec, nil +} + +func (r *Registry) lookupTypeDecoder(t reflect.Type) (ValueDecoder, bool) { + dec, found := r.typeDecoders[t] + return dec, found +} + +func (r *Registry) lookupInterfaceDecoder(t reflect.Type, allowAddr bool) (ValueDecoder, bool) { + for _, idec := range r.interfaceDecoders { + if t.Implements(idec.i) { + return idec.vd, true + } + if allowAddr && t.Kind() != reflect.Ptr && reflect.PtrTo(t).Implements(idec.i) { + // if *t implements an interface, this will catch if t implements an interface further ahead + // in interfaceDecoders + defaultDec, found := r.lookupInterfaceDecoder(t, false) + if !found { + defaultDec = r.kindDecoders[t.Kind()] + } + return newCondAddrDecoder(idec.vd, defaultDec), true + } + } + return nil, false +} + +// LookupTypeMapEntry inspects the registry's type map for a Go type for the corresponding BSON +// type. If no type is found, ErrNoTypeMapEntry is returned. +func (r *Registry) LookupTypeMapEntry(bt bsontype.Type) (reflect.Type, error) { + t, ok := r.typeMap[bt] + if !ok || t == nil { + return nil, ErrNoTypeMapEntry{Type: bt} + } + return t, nil +} + +type interfaceValueEncoder struct { + i reflect.Type + ve ValueEncoder +} + +type interfaceValueDecoder struct { + i reflect.Type + vd ValueDecoder +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go new file mode 100644 index 0000000..3c1b6b8 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go @@ -0,0 +1,199 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "fmt" + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +var defaultSliceCodec = NewSliceCodec() + +// SliceCodec is the Codec used for slice values. +type SliceCodec struct { + EncodeNilAsEmpty bool +} + +var _ ValueCodec = &MapCodec{} + +// NewSliceCodec returns a MapCodec with options opts. +func NewSliceCodec(opts ...*bsonoptions.SliceCodecOptions) *SliceCodec { + sliceOpt := bsonoptions.MergeSliceCodecOptions(opts...) + + codec := SliceCodec{} + if sliceOpt.EncodeNilAsEmpty != nil { + codec.EncodeNilAsEmpty = *sliceOpt.EncodeNilAsEmpty + } + return &codec +} + +// EncodeValue is the ValueEncoder for slice types. 
+func (sc SliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Slice { + return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + if val.IsNil() && !sc.EncodeNilAsEmpty { + return vw.WriteNull() + } + + // If we have a []byte we want to treat it as a binary instead of as an array. + if val.Type().Elem() == tByte { + var byteSlice []byte + for idx := 0; idx < val.Len(); idx++ { + byteSlice = append(byteSlice, val.Index(idx).Interface().(byte)) + } + return vw.WriteBinary(byteSlice) + } + + // If we have a []primitive.E we want to treat it as a document instead of as an array. + if val.Type().ConvertibleTo(tD) { + d := val.Convert(tD).Interface().(primitive.D) + + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + for _, e := range d { + err = encodeElement(ec, dw, e) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() + } + + aw, err := vw.WriteArray() + if err != nil { + return err + } + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.Index(idx)) + if lookupErr != nil && lookupErr != errInvalidValue { + return lookupErr + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + if lookupErr == errInvalidValue { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + return aw.WriteArrayEnd() +} + +// DecodeValue is the ValueDecoder for slice types. 
+func (sc *SliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.Slice { + return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case bsontype.Array: + case bsontype.Null: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + case bsontype.Undefined: + val.Set(reflect.Zero(val.Type())) + return vr.ReadUndefined() + case bsontype.Type(0), bsontype.EmbeddedDocument: + if val.Type().Elem() != tE { + return fmt.Errorf("cannot decode document into %s", val.Type()) + } + case bsontype.Binary: + if val.Type().Elem() != tByte { + return fmt.Errorf("SliceDecodeValue can only decode a binary into a byte array, got %v", vrType) + } + data, subtype, err := vr.ReadBinary() + if err != nil { + return err + } + if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld { + return fmt.Errorf("SliceDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype) + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, len(data))) + } + + val.SetLen(0) + for _, elem := range data { + val.Set(reflect.Append(val, reflect.ValueOf(elem))) + } + return nil + case bsontype.String: + if sliceType := val.Type().Elem(); sliceType != tByte { + return fmt.Errorf("SliceDecodeValue can only decode a string into a byte array, got %v", sliceType) + } + str, err := vr.ReadString() + if err != nil { + return err + } + byteStr := []byte(str) + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, len(byteStr))) + } + + val.SetLen(0) + for _, elem := range byteStr { + val.Set(reflect.Append(val, reflect.ValueOf(elem))) + } + return nil + default: + return fmt.Errorf("cannot decode %v into a slice", vrType) + } + + var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error) + switch val.Type().Elem() { + case tE: + dc.Ancestor = val.Type() + elemsFunc = defaultValueDecoders.decodeD + default: + elemsFunc = defaultValueDecoders.decodeDefault + } + + elems, err := elemsFunc(dc, vr, val) + if err != nil { + return err + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, len(elems))) + } + + val.SetLen(0) + val.Set(reflect.Append(val, elems...)) + + return nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go new file mode 100644 index 0000000..5332b7c --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go @@ -0,0 +1,119 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "fmt" + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +// StringCodec is the Codec used for struct values. +type StringCodec struct { + DecodeObjectIDAsHex bool +} + +var ( + defaultStringCodec = NewStringCodec() + + _ ValueCodec = defaultStringCodec + _ typeDecoder = defaultStringCodec +) + +// NewStringCodec returns a StringCodec with options opts. 
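NewStringCodec below consumes a bsonoptions.StringCodecOptions; the sketch that follows shows one way to configure DecodeObjectIDAsHex and install the codec as the kind codec for strings. It is illustrative only and assumes the bsonoptions package plus the public bson.NewRegistryBuilder and bson.UnmarshalWithRegistry helpers.

package main

import (
	"fmt"
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonoptions"
	"go.mongodb.org/mongo-driver/bson/primitive"
)

func main() {
	codec := bsoncodec.NewStringCodec(bsonoptions.StringCodec().SetDecodeObjectIDAsHex(true))
	reg := bson.NewRegistryBuilder().
		RegisterDefaultEncoder(reflect.String, codec).
		RegisterDefaultDecoder(reflect.String, codec).
		Build()

	oid := primitive.NewObjectID()
	raw, _ := bson.Marshal(bson.M{"_id": oid})

	var out struct {
		ID string `bson:"_id"`
	}
	_ = bson.UnmarshalWithRegistry(reg, raw, &out)
	fmt.Println(out.ID == oid.Hex()) // true: the ObjectID decodes into a string as hex
}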
+func NewStringCodec(opts ...*bsonoptions.StringCodecOptions) *StringCodec { + stringOpt := bsonoptions.MergeStringCodecOptions(opts...) + return &StringCodec{*stringOpt.DecodeObjectIDAsHex} +} + +// EncodeValue is the ValueEncoder for string types. +func (sc *StringCodec) EncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if val.Kind() != reflect.String { + return ValueEncoderError{ + Name: "StringEncodeValue", + Kinds: []reflect.Kind{reflect.String}, + Received: val, + } + } + + return vw.WriteString(val.String()) +} + +func (sc *StringCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t.Kind() != reflect.String { + return emptyValue, ValueDecoderError{ + Name: "StringDecodeValue", + Kinds: []reflect.Kind{reflect.String}, + Received: reflect.Zero(t), + } + } + + var str string + var err error + switch vr.Type() { + case bsontype.String: + str, err = vr.ReadString() + if err != nil { + return emptyValue, err + } + case bsontype.ObjectID: + oid, err := vr.ReadObjectID() + if err != nil { + return emptyValue, err + } + if sc.DecodeObjectIDAsHex { + str = oid.Hex() + } else { + byteArray := [12]byte(oid) + str = string(byteArray[:]) + } + case bsontype.Symbol: + str, err = vr.ReadSymbol() + if err != nil { + return emptyValue, err + } + case bsontype.Binary: + data, subtype, err := vr.ReadBinary() + if err != nil { + return emptyValue, err + } + if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld { + return emptyValue, decodeBinaryError{subtype: subtype, typeName: "string"} + } + str = string(data) + case bsontype.Null: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into a string type", vr.Type()) + } + + return reflect.ValueOf(str), nil +} + +// DecodeValue is the ValueDecoder for string types. +func (sc *StringCodec) DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.String { + return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val} + } + + elem, err := sc.decodeType(dctx, vr, val.Type()) + if err != nil { + return err + } + + val.SetString(elem.String()) + return nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go new file mode 100644 index 0000000..da1ae18 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go @@ -0,0 +1,669 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "errors" + "fmt" + "reflect" + "sort" + "strings" + "sync" + "time" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +// DecodeError represents an error that occurs when unmarshalling BSON bytes into a native Go type. +type DecodeError struct { + keys []string + wrapped error +} + +// Unwrap returns the underlying error +func (de *DecodeError) Unwrap() error { + return de.wrapped +} + +// Error implements the error interface. 
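When struct decoding fails, the error surfaced to the caller is the *DecodeError defined above; a sketch of recovering the key path, assuming the standard errors package and the driver's bson package:

    var target struct {
        N int32 `bson:"n"`
    }
    raw, _ := bson.Marshal(bson.M{"n": "not a number"})

    err := bson.Unmarshal(raw, &target)
    var de *bsoncodec.DecodeError
    if errors.As(err, &de) {
        _ = de.Keys()   // ["n"]: the BSON key path, outermost key first
        _ = de.Unwrap() // the underlying type-mismatch error
    }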
+func (de *DecodeError) Error() string { + // The keys are stored in reverse order because the de.keys slice is builtup while propagating the error up the + // stack of BSON keys, so we call de.Keys(), which reverses them. + keyPath := strings.Join(de.Keys(), ".") + return fmt.Sprintf("error decoding key %s: %v", keyPath, de.wrapped) +} + +// Keys returns the BSON key path that caused an error as a slice of strings. The keys in the slice are in top-down +// order. For example, if the document being unmarshalled was {a: {b: {c: 1}}} and the value for c was supposed to be +// a string, the keys slice will be ["a", "b", "c"]. +func (de *DecodeError) Keys() []string { + reversedKeys := make([]string, 0, len(de.keys)) + for idx := len(de.keys) - 1; idx >= 0; idx-- { + reversedKeys = append(reversedKeys, de.keys[idx]) + } + + return reversedKeys +} + +// Zeroer allows custom struct types to implement a report of zero +// state. All struct types that don't implement Zeroer or where IsZero +// returns false are considered to be not zero. +type Zeroer interface { + IsZero() bool +} + +// StructCodec is the Codec used for struct values. +type StructCodec struct { + cache map[reflect.Type]*structDescription + l sync.RWMutex + parser StructTagParser + DecodeZeroStruct bool + DecodeDeepZeroInline bool + EncodeOmitDefaultStruct bool + AllowUnexportedFields bool + OverwriteDuplicatedInlinedFields bool +} + +var _ ValueEncoder = &StructCodec{} +var _ ValueDecoder = &StructCodec{} + +// NewStructCodec returns a StructCodec that uses p for struct tag parsing. +func NewStructCodec(p StructTagParser, opts ...*bsonoptions.StructCodecOptions) (*StructCodec, error) { + if p == nil { + return nil, errors.New("a StructTagParser must be provided to NewStructCodec") + } + + structOpt := bsonoptions.MergeStructCodecOptions(opts...) + + codec := &StructCodec{ + cache: make(map[reflect.Type]*structDescription), + parser: p, + } + + if structOpt.DecodeZeroStruct != nil { + codec.DecodeZeroStruct = *structOpt.DecodeZeroStruct + } + if structOpt.DecodeDeepZeroInline != nil { + codec.DecodeDeepZeroInline = *structOpt.DecodeDeepZeroInline + } + if structOpt.EncodeOmitDefaultStruct != nil { + codec.EncodeOmitDefaultStruct = *structOpt.EncodeOmitDefaultStruct + } + if structOpt.OverwriteDuplicatedInlinedFields != nil { + codec.OverwriteDuplicatedInlinedFields = *structOpt.OverwriteDuplicatedInlinedFields + } + if structOpt.AllowUnexportedFields != nil { + codec.AllowUnexportedFields = *structOpt.AllowUnexportedFields + } + + return codec, nil +} + +// EncodeValue handles encoding generic struct types. 
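Constructing the codec directly follows the signature above; a sketch using the JSON-fallback tag parser that appears later in this file (the option values here are arbitrary):

    codec, err := bsoncodec.NewStructCodec(
        bsoncodec.JSONFallbackStructTagParser, // use `json` tags when no `bson` tag is present
        bsonoptions.StructCodec().SetDecodeZeroStruct(true),
    )
    if err != nil {
        // only returned when the tag parser is nil
    }
    _ = codec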
+func (sc *StructCodec) EncodeValue(r EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Struct { + return ValueEncoderError{Name: "StructCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val} + } + + sd, err := sc.describeStruct(r.Registry, val.Type()) + if err != nil { + return err + } + + dw, err := vw.WriteDocument() + if err != nil { + return err + } + var rv reflect.Value + for _, desc := range sd.fl { + if desc.inline == nil { + rv = val.Field(desc.idx) + } else { + rv, err = fieldByIndexErr(val, desc.inline) + if err != nil { + continue + } + } + + desc.encoder, rv, err = defaultValueEncoders.lookupElementEncoder(r, desc.encoder, rv) + + if err != nil && err != errInvalidValue { + return err + } + + if err == errInvalidValue { + if desc.omitEmpty { + continue + } + vw2, err := dw.WriteDocumentElement(desc.name) + if err != nil { + return err + } + err = vw2.WriteNull() + if err != nil { + return err + } + continue + } + + if desc.encoder == nil { + return ErrNoEncoder{Type: rv.Type()} + } + + encoder := desc.encoder + + var isZero bool + rvInterface := rv.Interface() + if cz, ok := encoder.(CodecZeroer); ok { + isZero = cz.IsTypeZero(rvInterface) + } else if rv.Kind() == reflect.Interface { + // sc.isZero will not treat an interface rv as an interface, so we need to check for the zero interface separately. + isZero = rv.IsNil() + } else { + isZero = sc.isZero(rvInterface) + } + if desc.omitEmpty && isZero { + continue + } + + vw2, err := dw.WriteDocumentElement(desc.name) + if err != nil { + return err + } + + ectx := EncodeContext{Registry: r.Registry, MinSize: desc.minSize} + err = encoder.EncodeValue(ectx, vw2, rv) + if err != nil { + return err + } + } + + if sd.inlineMap >= 0 { + rv := val.Field(sd.inlineMap) + collisionFn := func(key string) bool { + _, exists := sd.fm[key] + return exists + } + + return defaultMapCodec.mapEncodeValue(r, dw, rv, collisionFn) + } + + return dw.WriteDocumentEnd() +} + +func newDecodeError(key string, original error) error { + de, ok := original.(*DecodeError) + if !ok { + return &DecodeError{ + keys: []string{key}, + wrapped: original, + } + } + + de.keys = append(de.keys, key) + return de +} + +// DecodeValue implements the Codec interface. +// By default, map types in val will not be cleared. If a map has existing key/value pairs, it will be extended with the new ones from vr. +// For slices, the decoder will set the length of the slice to zero and append all elements. The underlying array will not be cleared. 
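The decode semantics described above can be observed with nothing but the public bson package; a sketch (type and field names invented):

    type settings struct {
        Flags map[string]bool `bson:"flags"`
    }

    s := settings{Flags: map[string]bool{"existing": true}}
    raw, _ := bson.Marshal(bson.M{"flags": bson.M{"new": true}})

    _ = bson.Unmarshal(raw, &s)
    // s.Flags now holds both "existing" and "new": the map is extended, not replaced,
    // unless DecodeZeroStruct (or the map codec's DecodeZerosMap) is enabled.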
+func (sc *StructCodec) DecodeValue(r DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.Struct { + return ValueDecoderError{Name: "StructCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case bsontype.Type(0), bsontype.EmbeddedDocument: + case bsontype.Null: + if err := vr.ReadNull(); err != nil { + return err + } + + val.Set(reflect.Zero(val.Type())) + return nil + case bsontype.Undefined: + if err := vr.ReadUndefined(); err != nil { + return err + } + + val.Set(reflect.Zero(val.Type())) + return nil + default: + return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type()) + } + + sd, err := sc.describeStruct(r.Registry, val.Type()) + if err != nil { + return err + } + + if sc.DecodeZeroStruct { + val.Set(reflect.Zero(val.Type())) + } + if sc.DecodeDeepZeroInline && sd.inline { + val.Set(deepZero(val.Type())) + } + + var decoder ValueDecoder + var inlineMap reflect.Value + if sd.inlineMap >= 0 { + inlineMap = val.Field(sd.inlineMap) + decoder, err = r.LookupDecoder(inlineMap.Type().Elem()) + if err != nil { + return err + } + } + + dr, err := vr.ReadDocument() + if err != nil { + return err + } + + for { + name, vr, err := dr.ReadElement() + if err == bsonrw.ErrEOD { + break + } + if err != nil { + return err + } + + fd, exists := sd.fm[name] + if !exists { + // if the original name isn't found in the struct description, try again with the name in lowercase + // this could match if a BSON tag isn't specified because by default, describeStruct lowercases all field + // names + fd, exists = sd.fm[strings.ToLower(name)] + } + + if !exists { + if sd.inlineMap < 0 { + // The encoding/json package requires a flag to return on error for non-existent fields. + // This functionality seems appropriate for the struct codec. + err = vr.Skip() + if err != nil { + return err + } + continue + } + + if inlineMap.IsNil() { + inlineMap.Set(reflect.MakeMap(inlineMap.Type())) + } + + elem := reflect.New(inlineMap.Type().Elem()).Elem() + r.Ancestor = inlineMap.Type() + err = decoder.DecodeValue(r, vr, elem) + if err != nil { + return err + } + inlineMap.SetMapIndex(reflect.ValueOf(name), elem) + continue + } + + var field reflect.Value + if fd.inline == nil { + field = val.Field(fd.idx) + } else { + field, err = getInlineField(val, fd.inline) + if err != nil { + return err + } + } + + if !field.CanSet() { // Being settable is a super set of being addressable. 
+ innerErr := fmt.Errorf("field %v is not settable", field) + return newDecodeError(fd.name, innerErr) + } + if field.Kind() == reflect.Ptr && field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + field = field.Addr() + + dctx := DecodeContext{ + Registry: r.Registry, + Truncate: fd.truncate || r.Truncate, + defaultDocumentType: r.defaultDocumentType, + } + + if fd.decoder == nil { + return newDecodeError(fd.name, ErrNoDecoder{Type: field.Elem().Type()}) + } + + err = fd.decoder.DecodeValue(dctx, vr, field.Elem()) + if err != nil { + return newDecodeError(fd.name, err) + } + } + + return nil +} + +func (sc *StructCodec) isZero(i interface{}) bool { + v := reflect.ValueOf(i) + + // check the value validity + if !v.IsValid() { + return true + } + + if z, ok := v.Interface().(Zeroer); ok && (v.Kind() != reflect.Ptr || !v.IsNil()) { + return z.IsZero() + } + + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflect.Struct: + if sc.EncodeOmitDefaultStruct { + vt := v.Type() + if vt == tTime { + return v.Interface().(time.Time).IsZero() + } + for i := 0; i < v.NumField(); i++ { + if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous { + continue // Private field + } + fld := v.Field(i) + if !sc.isZero(fld.Interface()) { + return false + } + } + return true + } + } + + return false +} + +type structDescription struct { + fm map[string]fieldDescription + fl []fieldDescription + inlineMap int + inline bool +} + +type fieldDescription struct { + name string // BSON key name + fieldName string // struct field name + idx int + omitEmpty bool + minSize bool + truncate bool + inline []int + encoder ValueEncoder + decoder ValueDecoder +} + +type byIndex []fieldDescription + +func (bi byIndex) Len() int { return len(bi) } + +func (bi byIndex) Swap(i, j int) { bi[i], bi[j] = bi[j], bi[i] } + +func (bi byIndex) Less(i, j int) bool { + // If a field is inlined, its index in the top level struct is stored at inline[0] + iIdx, jIdx := bi[i].idx, bi[j].idx + if len(bi[i].inline) > 0 { + iIdx = bi[i].inline[0] + } + if len(bi[j].inline) > 0 { + jIdx = bi[j].inline[0] + } + if iIdx != jIdx { + return iIdx < jIdx + } + for k, biik := range bi[i].inline { + if k >= len(bi[j].inline) { + return false + } + if biik != bi[j].inline[k] { + return biik < bi[j].inline[k] + } + } + return len(bi[i].inline) < len(bi[j].inline) +} + +func (sc *StructCodec) describeStruct(r *Registry, t reflect.Type) (*structDescription, error) { + // We need to analyze the struct, including getting the tags, collecting + // information about inlining, and create a map of the field name to the field. 
+ sc.l.RLock() + ds, exists := sc.cache[t] + sc.l.RUnlock() + if exists { + return ds, nil + } + + numFields := t.NumField() + sd := &structDescription{ + fm: make(map[string]fieldDescription, numFields), + fl: make([]fieldDescription, 0, numFields), + inlineMap: -1, + } + + var fields []fieldDescription + for i := 0; i < numFields; i++ { + sf := t.Field(i) + if sf.PkgPath != "" && (!sc.AllowUnexportedFields || !sf.Anonymous) { + // field is private or unexported fields aren't allowed, ignore + continue + } + + sfType := sf.Type + encoder, err := r.LookupEncoder(sfType) + if err != nil { + encoder = nil + } + decoder, err := r.LookupDecoder(sfType) + if err != nil { + decoder = nil + } + + description := fieldDescription{ + fieldName: sf.Name, + idx: i, + encoder: encoder, + decoder: decoder, + } + + stags, err := sc.parser.ParseStructTags(sf) + if err != nil { + return nil, err + } + if stags.Skip { + continue + } + description.name = stags.Name + description.omitEmpty = stags.OmitEmpty + description.minSize = stags.MinSize + description.truncate = stags.Truncate + + if stags.Inline { + sd.inline = true + switch sfType.Kind() { + case reflect.Map: + if sd.inlineMap >= 0 { + return nil, errors.New("(struct " + t.String() + ") multiple inline maps") + } + if sfType.Key() != tString { + return nil, errors.New("(struct " + t.String() + ") inline map must have a string keys") + } + sd.inlineMap = description.idx + case reflect.Ptr: + sfType = sfType.Elem() + if sfType.Kind() != reflect.Struct { + return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String()) + } + fallthrough + case reflect.Struct: + inlinesf, err := sc.describeStruct(r, sfType) + if err != nil { + return nil, err + } + for _, fd := range inlinesf.fl { + if fd.inline == nil { + fd.inline = []int{i, fd.idx} + } else { + fd.inline = append([]int{i}, fd.inline...) + } + fields = append(fields, fd) + + } + default: + return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String()) + } + continue + } + fields = append(fields, description) + } + + // Sort fieldDescriptions by name and use dominance rules to determine which should be added for each name + sort.Slice(fields, func(i, j int) bool { + x := fields + // sort field by name, breaking ties with depth, then + // breaking ties with index sequence. + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].inline) != len(x[j].inline) { + return len(x[i].inline) < len(x[j].inline) + } + return byIndex(x).Less(i, j) + }) + + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + sd.fl = append(sd.fl, fi) + sd.fm[name] = fi + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if !ok || !sc.OverwriteDuplicatedInlinedFields { + return nil, fmt.Errorf("struct %s has duplicated key %s", t.String(), name) + } + sd.fl = append(sd.fl, dominant) + sd.fm[name] = dominant + } + + sort.Sort(byIndex(sd.fl)) + + sc.l.Lock() + sc.cache[t] = sd + sc.l.Unlock() + + return sd, nil +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's inlining rules. 
If there are multiple top-level +// fields, the boolean will be false: This condition is an error in Go +// and we skip all the fields. +func dominantField(fields []fieldDescription) (fieldDescription, bool) { + // The fields are sorted in increasing index-length order, then by presence of tag. + // That means that the first field is the dominant one. We need only check + // for error cases: two fields at top level. + if len(fields) > 1 && + len(fields[0].inline) == len(fields[1].inline) { + return fieldDescription{}, false + } + return fields[0], true +} + +func fieldByIndexErr(v reflect.Value, index []int) (result reflect.Value, err error) { + defer func() { + if recovered := recover(); recovered != nil { + switch r := recovered.(type) { + case string: + err = fmt.Errorf("%s", r) + case error: + err = r + } + } + }() + + result = v.FieldByIndex(index) + return +} + +func getInlineField(val reflect.Value, index []int) (reflect.Value, error) { + field, err := fieldByIndexErr(val, index) + if err == nil { + return field, nil + } + + // if parent of this element doesn't exist, fix its parent + inlineParent := index[:len(index)-1] + var fParent reflect.Value + if fParent, err = fieldByIndexErr(val, inlineParent); err != nil { + fParent, err = getInlineField(val, inlineParent) + if err != nil { + return fParent, err + } + } + fParent.Set(reflect.New(fParent.Type().Elem())) + + return fieldByIndexErr(val, index) +} + +// DeepZero returns recursive zero object +func deepZero(st reflect.Type) (result reflect.Value) { + result = reflect.Indirect(reflect.New(st)) + + if result.Kind() == reflect.Struct { + for i := 0; i < result.NumField(); i++ { + if f := result.Field(i); f.Kind() == reflect.Ptr { + if f.CanInterface() { + if ft := reflect.TypeOf(f.Interface()); ft.Elem().Kind() == reflect.Struct { + result.Field(i).Set(recursivePointerTo(deepZero(ft.Elem()))) + } + } + } + } + } + + return +} + +// recursivePointerTo calls reflect.New(v.Type) but recursively for its fields inside +func recursivePointerTo(v reflect.Value) reflect.Value { + v = reflect.Indirect(v) + result := reflect.New(v.Type()) + if v.Kind() == reflect.Struct { + for i := 0; i < v.NumField(); i++ { + if f := v.Field(i); f.Kind() == reflect.Ptr { + if f.Elem().Kind() == reflect.Struct { + result.Elem().Field(i).Set(recursivePointerTo(f)) + } + } + } + } + + return result +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go new file mode 100644 index 0000000..62708c5 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go @@ -0,0 +1,139 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "reflect" + "strings" +) + +// StructTagParser returns the struct tags for a given struct field. +type StructTagParser interface { + ParseStructTags(reflect.StructField) (StructTags, error) +} + +// StructTagParserFunc is an adapter that allows a generic function to be used +// as a StructTagParser. +type StructTagParserFunc func(reflect.StructField) (StructTags, error) + +// ParseStructTags implements the StructTagParser interface. 
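Because StructTagParserFunc is just a function type, callers can plug in their own tag rules; a hypothetical sketch that keys every field by its upper-cased Go name:

    // Assumed imports: "reflect", "strings", ".../bson/bsoncodec"
    var upperCaseParser bsoncodec.StructTagParserFunc = func(sf reflect.StructField) (bsoncodec.StructTags, error) {
        return bsoncodec.StructTags{Name: strings.ToUpper(sf.Name)}, nil
    }

    codec, err := bsoncodec.NewStructCodec(upperCaseParser)
    if err != nil {
        // handle the error
    }
    _ = codec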
+func (stpf StructTagParserFunc) ParseStructTags(sf reflect.StructField) (StructTags, error) { + return stpf(sf) +} + +// StructTags represents the struct tag fields that the StructCodec uses during +// the encoding and decoding process. +// +// In the case of a struct, the lowercased field name is used as the key for each exported +// field but this behavior may be changed using a struct tag. The tag may also contain flags to +// adjust the marshalling behavior for the field. +// +// The properties are defined below: +// +// OmitEmpty Only include the field if it's not set to the zero value for the type or to +// empty slices or maps. +// +// MinSize Marshal an integer of a type larger than 32 bits value as an int32, if that's +// feasible while preserving the numeric value. +// +// Truncate When unmarshaling a BSON double, it is permitted to lose precision to fit within +// a float32. +// +// Inline Inline the field, which must be a struct or a map, causing all of its fields +// or keys to be processed as if they were part of the outer struct. For maps, +// keys must not conflict with the bson keys of other struct fields. +// +// Skip This struct field should be skipped. This is usually denoted by parsing a "-" +// for the name. +// +// TODO(skriptble): Add tags for undefined as nil and for null as nil. +type StructTags struct { + Name string + OmitEmpty bool + MinSize bool + Truncate bool + Inline bool + Skip bool +} + +// DefaultStructTagParser is the StructTagParser used by the StructCodec by default. +// It will handle the bson struct tag. See the documentation for StructTags to see +// what each of the returned fields means. +// +// If there is no name in the struct tag fields, the struct field name is lowercased. +// The tag formats accepted are: +// +// "[][,[,]]" +// +// `(...) bson:"[][,[,]]" (...)` +// +// An example: +// +// type T struct { +// A bool +// B int "myb" +// C string "myc,omitempty" +// D string `bson:",omitempty" json:"jsonkey"` +// E int64 ",minsize" +// F int64 "myf,omitempty,minsize" +// } +// +// A struct tag either consisting entirely of '-' or with a bson key with a +// value consisting entirely of '-' will return a StructTags with Skip true and +// the remaining fields will be their default values. +var DefaultStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) { + key := strings.ToLower(sf.Name) + tag, ok := sf.Tag.Lookup("bson") + if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 { + tag = string(sf.Tag) + } + return parseTags(key, tag) +} + +func parseTags(key string, tag string) (StructTags, error) { + var st StructTags + if tag == "-" { + st.Skip = true + return st, nil + } + + for idx, str := range strings.Split(tag, ",") { + if idx == 0 && str != "" { + key = str + } + switch str { + case "omitempty": + st.OmitEmpty = true + case "minsize": + st.MinSize = true + case "truncate": + st.Truncate = true + case "inline": + st.Inline = true + } + } + + st.Name = key + + return st, nil +} + +// JSONFallbackStructTagParser has the same behavior as DefaultStructTagParser +// but will also fallback to parsing the json tag instead on a field where the +// bson tag isn't available. 
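A worked illustration of the tag grammar documented above (every name here is invented):

    type Audit struct {
        ID      primitive.ObjectID     `bson:"_id,omitempty"`  // omitted when zero
        Count   int64                  `bson:"count,minsize"`  // stored as int32 when it fits
        Ratio   float32                `bson:"ratio,truncate"` // doubles may lose precision on decode
        Extra   map[string]interface{} `bson:",inline"`        // keys merged into the outer document
        Skipped string                 `bson:"-"`              // never marshalled
    }
    _ = Audit{}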
+var JSONFallbackStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) { + key := strings.ToLower(sf.Name) + tag, ok := sf.Tag.Lookup("bson") + if !ok { + tag, ok = sf.Tag.Lookup("json") + } + if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 { + tag = string(sf.Tag) + } + + return parseTags(key, tag) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go new file mode 100644 index 0000000..ec7e30f --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go @@ -0,0 +1,127 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "fmt" + "reflect" + "time" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +const ( + timeFormatString = "2006-01-02T15:04:05.999Z07:00" +) + +// TimeCodec is the Codec used for time.Time values. +type TimeCodec struct { + UseLocalTimeZone bool +} + +var ( + defaultTimeCodec = NewTimeCodec() + + _ ValueCodec = defaultTimeCodec + _ typeDecoder = defaultTimeCodec +) + +// NewTimeCodec returns a TimeCodec with options opts. +func NewTimeCodec(opts ...*bsonoptions.TimeCodecOptions) *TimeCodec { + timeOpt := bsonoptions.MergeTimeCodecOptions(opts...) + + codec := TimeCodec{} + if timeOpt.UseLocalTimeZone != nil { + codec.UseLocalTimeZone = *timeOpt.UseLocalTimeZone + } + return &codec +} + +func (tc *TimeCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tTime { + return emptyValue, ValueDecoderError{ + Name: "TimeDecodeValue", + Types: []reflect.Type{tTime}, + Received: reflect.Zero(t), + } + } + + var timeVal time.Time + switch vrType := vr.Type(); vrType { + case bsontype.DateTime: + dt, err := vr.ReadDateTime() + if err != nil { + return emptyValue, err + } + timeVal = time.Unix(dt/1000, dt%1000*1000000) + case bsontype.String: + // assume strings are in the isoTimeFormat + timeStr, err := vr.ReadString() + if err != nil { + return emptyValue, err + } + timeVal, err = time.Parse(timeFormatString, timeStr) + if err != nil { + return emptyValue, err + } + case bsontype.Int64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + timeVal = time.Unix(i64/1000, i64%1000*1000000) + case bsontype.Timestamp: + t, _, err := vr.ReadTimestamp() + if err != nil { + return emptyValue, err + } + timeVal = time.Unix(int64(t), 0) + case bsontype.Null: + if err := vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err := vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into a time.Time", vrType) + } + + if !tc.UseLocalTimeZone { + timeVal = timeVal.UTC() + } + return reflect.ValueOf(timeVal), nil +} + +// DecodeValue is the ValueDecoderFunc for time.Time. 
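Decoded times are normalised to UTC unless UseLocalTimeZone is set; a sketch of opting in, assuming the registry helpers from the driver's bson package:

    // Assumed imports: "reflect", "time", bson, bsoncodec, bsonoptions
    tc := bsoncodec.NewTimeCodec(bsonoptions.TimeCodec().SetUseLocalTimeZone(true))

    reg := bson.NewRegistryBuilder().
        RegisterTypeDecoder(reflect.TypeOf(time.Time{}), tc).
        Build()

    var out struct {
        At time.Time `bson:"at"`
    }
    raw, _ := bson.Marshal(bson.M{"at": time.Now()})
    _ = bson.UnmarshalWithRegistry(reg, raw, &out) // out.At keeps the local zone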
+func (tc *TimeCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tTime { + return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val} + } + + elem, err := tc.decodeType(dc, vr, tTime) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// EncodeValue is the ValueEncoderFunc for time.TIme. +func (tc *TimeCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tTime { + return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val} + } + tt := val.Interface().(time.Time) + dt := primitive.NewDateTimeFromTime(tt) + return vw.WriteDateTime(int64(dt)) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go new file mode 100644 index 0000000..07f4b70 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go @@ -0,0 +1,57 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "encoding/json" + "net/url" + "reflect" + "time" + + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" +) + +var tBool = reflect.TypeOf(false) +var tFloat64 = reflect.TypeOf(float64(0)) +var tInt32 = reflect.TypeOf(int32(0)) +var tInt64 = reflect.TypeOf(int64(0)) +var tString = reflect.TypeOf("") +var tTime = reflect.TypeOf(time.Time{}) + +var tEmpty = reflect.TypeOf((*interface{})(nil)).Elem() +var tByteSlice = reflect.TypeOf([]byte(nil)) +var tByte = reflect.TypeOf(byte(0x00)) +var tURL = reflect.TypeOf(url.URL{}) +var tJSONNumber = reflect.TypeOf(json.Number("")) + +var tValueMarshaler = reflect.TypeOf((*ValueMarshaler)(nil)).Elem() +var tValueUnmarshaler = reflect.TypeOf((*ValueUnmarshaler)(nil)).Elem() +var tMarshaler = reflect.TypeOf((*Marshaler)(nil)).Elem() +var tUnmarshaler = reflect.TypeOf((*Unmarshaler)(nil)).Elem() +var tProxy = reflect.TypeOf((*Proxy)(nil)).Elem() + +var tBinary = reflect.TypeOf(primitive.Binary{}) +var tUndefined = reflect.TypeOf(primitive.Undefined{}) +var tOID = reflect.TypeOf(primitive.ObjectID{}) +var tDateTime = reflect.TypeOf(primitive.DateTime(0)) +var tNull = reflect.TypeOf(primitive.Null{}) +var tRegex = reflect.TypeOf(primitive.Regex{}) +var tCodeWithScope = reflect.TypeOf(primitive.CodeWithScope{}) +var tDBPointer = reflect.TypeOf(primitive.DBPointer{}) +var tJavaScript = reflect.TypeOf(primitive.JavaScript("")) +var tSymbol = reflect.TypeOf(primitive.Symbol("")) +var tTimestamp = reflect.TypeOf(primitive.Timestamp{}) +var tDecimal = reflect.TypeOf(primitive.Decimal128{}) +var tMinKey = reflect.TypeOf(primitive.MinKey{}) +var tMaxKey = reflect.TypeOf(primitive.MaxKey{}) +var tD = reflect.TypeOf(primitive.D{}) +var tA = reflect.TypeOf(primitive.A{}) +var tE = reflect.TypeOf(primitive.E{}) + +var tCoreDocument = reflect.TypeOf(bsoncore.Document{}) +var tCoreArray = reflect.TypeOf(bsoncore.Array{}) diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go new file mode 100644 index 0000000..0b21ce9 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go @@ 
-0,0 +1,173 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsoncodec + +import ( + "fmt" + "math" + "reflect" + + "go.mongodb.org/mongo-driver/bson/bsonoptions" + "go.mongodb.org/mongo-driver/bson/bsonrw" + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +// UIntCodec is the Codec used for uint values. +type UIntCodec struct { + EncodeToMinSize bool +} + +var ( + defaultUIntCodec = NewUIntCodec() + + _ ValueCodec = defaultUIntCodec + _ typeDecoder = defaultUIntCodec +) + +// NewUIntCodec returns a UIntCodec with options opts. +func NewUIntCodec(opts ...*bsonoptions.UIntCodecOptions) *UIntCodec { + uintOpt := bsonoptions.MergeUIntCodecOptions(opts...) + + codec := UIntCodec{} + if uintOpt.EncodeToMinSize != nil { + codec.EncodeToMinSize = *uintOpt.EncodeToMinSize + } + return &codec +} + +// EncodeValue is the ValueEncoder for uint types. +func (uic *UIntCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Uint8, reflect.Uint16: + return vw.WriteInt32(int32(val.Uint())) + case reflect.Uint, reflect.Uint32, reflect.Uint64: + u64 := val.Uint() + + // If ec.MinSize or if encodeToMinSize is true for a non-uint64 value we should write val as an int32 + useMinSize := ec.MinSize || (uic.EncodeToMinSize && val.Kind() != reflect.Uint64) + + if u64 <= math.MaxInt32 && useMinSize { + return vw.WriteInt32(int32(u64)) + } + if u64 > math.MaxInt64 { + return fmt.Errorf("%d overflows int64", u64) + } + return vw.WriteInt64(int64(u64)) + } + + return ValueEncoderError{ + Name: "UintEncodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: val, + } +} + +func (uic *UIntCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) { + var i64 int64 + var err error + switch vrType := vr.Type(); vrType { + case bsontype.Int32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + i64 = int64(i32) + case bsontype.Int64: + i64, err = vr.ReadInt64() + if err != nil { + return emptyValue, err + } + case bsontype.Double: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + if !dc.Truncate && math.Floor(f64) != f64 { + return emptyValue, errCannotTruncate + } + if f64 > float64(math.MaxInt64) { + return emptyValue, fmt.Errorf("%g overflows int64", f64) + } + i64 = int64(f64) + case bsontype.Boolean: + b, err := vr.ReadBoolean() + if err != nil { + return emptyValue, err + } + if b { + i64 = 1 + } + case bsontype.Null: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case bsontype.Undefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType) + } + + switch t.Kind() { + case reflect.Uint8: + if i64 < 0 || i64 > math.MaxUint8 { + return emptyValue, fmt.Errorf("%d overflows uint8", i64) + } + + return reflect.ValueOf(uint8(i64)), nil + case reflect.Uint16: + if i64 < 0 || i64 > math.MaxUint16 { + return emptyValue, fmt.Errorf("%d overflows uint16", i64) + } + + return reflect.ValueOf(uint16(i64)), nil + case reflect.Uint32: + if i64 < 0 || i64 > math.MaxUint32 { + return emptyValue, fmt.Errorf("%d overflows uint32", i64) + } + 
+ return reflect.ValueOf(uint32(i64)), nil + case reflect.Uint64: + if i64 < 0 { + return emptyValue, fmt.Errorf("%d overflows uint64", i64) + } + + return reflect.ValueOf(uint64(i64)), nil + case reflect.Uint: + if i64 < 0 || int64(uint(i64)) != i64 { // Can we fit this inside of an uint + return emptyValue, fmt.Errorf("%d overflows uint", i64) + } + + return reflect.ValueOf(uint(i64)), nil + default: + return emptyValue, ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.Zero(t), + } + } +} + +// DecodeValue is the ValueDecoder for uint types. +func (uic *UIntCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { + if !val.CanSet() { + return ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: val, + } + } + + elem, err := uic.decodeType(dc, vr, val.Type()) + if err != nil { + return err + } + + val.SetUint(elem.Uint()) + return nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go new file mode 100644 index 0000000..b1256a4 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go @@ -0,0 +1,38 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// ByteSliceCodecOptions represents all possible options for byte slice encoding and decoding. +type ByteSliceCodecOptions struct { + EncodeNilAsEmpty *bool // Specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false. +} + +// ByteSliceCodec creates a new *ByteSliceCodecOptions +func ByteSliceCodec() *ByteSliceCodecOptions { + return &ByteSliceCodecOptions{} +} + +// SetEncodeNilAsEmpty specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false. +func (bs *ByteSliceCodecOptions) SetEncodeNilAsEmpty(b bool) *ByteSliceCodecOptions { + bs.EncodeNilAsEmpty = &b + return bs +} + +// MergeByteSliceCodecOptions combines the given *ByteSliceCodecOptions into a single *ByteSliceCodecOptions in a last one wins fashion. +func MergeByteSliceCodecOptions(opts ...*ByteSliceCodecOptions) *ByteSliceCodecOptions { + bs := ByteSliceCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.EncodeNilAsEmpty != nil { + bs.EncodeNilAsEmpty = opt.EncodeNilAsEmpty + } + } + + return bs +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go new file mode 100644 index 0000000..c40973c --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go @@ -0,0 +1,8 @@ +// Copyright (C) MongoDB, Inc. 2022-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package bsonoptions defines the optional configurations for the BSON codecs. 
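For the unsigned-integer codec above, a sketch of enabling minimum-size encoding, again assuming the driver's public registry API:

    uc := bsoncodec.NewUIntCodec(bsonoptions.UIntCodec().SetEncodeToMinSize(true))

    rb := bson.NewRegistryBuilder()
    for _, k := range []reflect.Kind{reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64} {
        rb.RegisterDefaultEncoder(k, uc)
    }
    reg := rb.Build()

    raw, _ := bson.MarshalWithRegistry(reg, bson.M{"n": uint(7)}) // written as int32, not int64
    _ = raw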
+package bsonoptions diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go new file mode 100644 index 0000000..6caaa00 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go @@ -0,0 +1,38 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// EmptyInterfaceCodecOptions represents all possible options for interface{} encoding and decoding. +type EmptyInterfaceCodecOptions struct { + DecodeBinaryAsSlice *bool // Specifies if Old and Generic type binarys should default to []slice instead of primitive.Binary. Defaults to false. +} + +// EmptyInterfaceCodec creates a new *EmptyInterfaceCodecOptions +func EmptyInterfaceCodec() *EmptyInterfaceCodecOptions { + return &EmptyInterfaceCodecOptions{} +} + +// SetDecodeBinaryAsSlice specifies if Old and Generic type binarys should default to []slice instead of primitive.Binary. Defaults to false. +func (e *EmptyInterfaceCodecOptions) SetDecodeBinaryAsSlice(b bool) *EmptyInterfaceCodecOptions { + e.DecodeBinaryAsSlice = &b + return e +} + +// MergeEmptyInterfaceCodecOptions combines the given *EmptyInterfaceCodecOptions into a single *EmptyInterfaceCodecOptions in a last one wins fashion. +func MergeEmptyInterfaceCodecOptions(opts ...*EmptyInterfaceCodecOptions) *EmptyInterfaceCodecOptions { + e := EmptyInterfaceCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.DecodeBinaryAsSlice != nil { + e.DecodeBinaryAsSlice = opt.DecodeBinaryAsSlice + } + } + + return e +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go new file mode 100644 index 0000000..7a6a880 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go @@ -0,0 +1,67 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// MapCodecOptions represents all possible options for map encoding and decoding. +type MapCodecOptions struct { + DecodeZerosMap *bool // Specifies if the map should be zeroed before decoding into it. Defaults to false. + EncodeNilAsEmpty *bool // Specifies if a nil map should encode as an empty document instead of null. Defaults to false. + // Specifies how keys should be handled. If false, the behavior matches encoding/json, where the encoding key type must + // either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key type must either be a + // string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with fmt.Sprint() and the + // encoding key type must be a string, an integer type, or a float. If true, the use of Stringer will override + // TextMarshaler/TextUnmarshaler. Defaults to false. 
+ EncodeKeysWithStringer *bool +} + +// MapCodec creates a new *MapCodecOptions +func MapCodec() *MapCodecOptions { + return &MapCodecOptions{} +} + +// SetDecodeZerosMap specifies if the map should be zeroed before decoding into it. Defaults to false. +func (t *MapCodecOptions) SetDecodeZerosMap(b bool) *MapCodecOptions { + t.DecodeZerosMap = &b + return t +} + +// SetEncodeNilAsEmpty specifies if a nil map should encode as an empty document instead of null. Defaults to false. +func (t *MapCodecOptions) SetEncodeNilAsEmpty(b bool) *MapCodecOptions { + t.EncodeNilAsEmpty = &b + return t +} + +// SetEncodeKeysWithStringer specifies how keys should be handled. If false, the behavior matches encoding/json, where the +// encoding key type must either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key +// type must either be a string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with +// fmt.Sprint() and the encoding key type must be a string, an integer type, or a float. If true, the use of Stringer +// will override TextMarshaler/TextUnmarshaler. Defaults to false. +func (t *MapCodecOptions) SetEncodeKeysWithStringer(b bool) *MapCodecOptions { + t.EncodeKeysWithStringer = &b + return t +} + +// MergeMapCodecOptions combines the given *MapCodecOptions into a single *MapCodecOptions in a last one wins fashion. +func MergeMapCodecOptions(opts ...*MapCodecOptions) *MapCodecOptions { + s := MapCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.DecodeZerosMap != nil { + s.DecodeZerosMap = opt.DecodeZerosMap + } + if opt.EncodeNilAsEmpty != nil { + s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty + } + if opt.EncodeKeysWithStringer != nil { + s.EncodeKeysWithStringer = opt.EncodeKeysWithStringer + } + } + + return s +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go new file mode 100644 index 0000000..ef965e4 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go @@ -0,0 +1,38 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// SliceCodecOptions represents all possible options for slice encoding and decoding. +type SliceCodecOptions struct { + EncodeNilAsEmpty *bool // Specifies if a nil slice should encode as an empty array instead of null. Defaults to false. +} + +// SliceCodec creates a new *SliceCodecOptions +func SliceCodec() *SliceCodecOptions { + return &SliceCodecOptions{} +} + +// SetEncodeNilAsEmpty specifies if a nil slice should encode as an empty array instead of null. Defaults to false. +func (s *SliceCodecOptions) SetEncodeNilAsEmpty(b bool) *SliceCodecOptions { + s.EncodeNilAsEmpty = &b + return s +} + +// MergeSliceCodecOptions combines the given *SliceCodecOptions into a single *SliceCodecOptions in a last one wins fashion. 
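All of the option types in this package follow the same builder-plus-merge pattern, with later options winning; a sketch using the map codec options above:

    base := bsonoptions.MapCodec().SetEncodeNilAsEmpty(true)
    override := bsonoptions.MapCodec().SetDecodeZerosMap(true)

    merged := bsonoptions.MergeMapCodecOptions(base, nil, override)
    // *merged.EncodeNilAsEmpty == true (only base set it)
    // *merged.DecodeZerosMap   == true (taken from override)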
+func MergeSliceCodecOptions(opts ...*SliceCodecOptions) *SliceCodecOptions { + s := SliceCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.EncodeNilAsEmpty != nil { + s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty + } + } + + return s +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go new file mode 100644 index 0000000..65964f4 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go @@ -0,0 +1,41 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +var defaultDecodeOIDAsHex = true + +// StringCodecOptions represents all possible options for string encoding and decoding. +type StringCodecOptions struct { + DecodeObjectIDAsHex *bool // Specifies if we should decode ObjectID as the hex value. Defaults to true. +} + +// StringCodec creates a new *StringCodecOptions +func StringCodec() *StringCodecOptions { + return &StringCodecOptions{} +} + +// SetDecodeObjectIDAsHex specifies if object IDs should be decoded as their hex representation. If false, a string made +// from the raw object ID bytes will be used. Defaults to true. +func (t *StringCodecOptions) SetDecodeObjectIDAsHex(b bool) *StringCodecOptions { + t.DecodeObjectIDAsHex = &b + return t +} + +// MergeStringCodecOptions combines the given *StringCodecOptions into a single *StringCodecOptions in a last one wins fashion. +func MergeStringCodecOptions(opts ...*StringCodecOptions) *StringCodecOptions { + s := &StringCodecOptions{&defaultDecodeOIDAsHex} + for _, opt := range opts { + if opt == nil { + continue + } + if opt.DecodeObjectIDAsHex != nil { + s.DecodeObjectIDAsHex = opt.DecodeObjectIDAsHex + } + } + + return s +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go new file mode 100644 index 0000000..78d1dd8 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go @@ -0,0 +1,87 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +var defaultOverwriteDuplicatedInlinedFields = true + +// StructCodecOptions represents all possible options for struct encoding and decoding. +type StructCodecOptions struct { + DecodeZeroStruct *bool // Specifies if structs should be zeroed before decoding into them. Defaults to false. + DecodeDeepZeroInline *bool // Specifies if structs should be recursively zeroed when a inline value is decoded. Defaults to false. + EncodeOmitDefaultStruct *bool // Specifies if default structs should be considered empty by omitempty. Defaults to false. + AllowUnexportedFields *bool // Specifies if unexported fields should be marshaled/unmarshaled. Defaults to false. + OverwriteDuplicatedInlinedFields *bool // Specifies if fields in inlined structs can be overwritten by higher level struct fields with the same key. Defaults to true. 
+} + +// StructCodec creates a new *StructCodecOptions +func StructCodec() *StructCodecOptions { + return &StructCodecOptions{} +} + +// SetDecodeZeroStruct specifies if structs should be zeroed before decoding into them. Defaults to false. +func (t *StructCodecOptions) SetDecodeZeroStruct(b bool) *StructCodecOptions { + t.DecodeZeroStruct = &b + return t +} + +// SetDecodeDeepZeroInline specifies if structs should be zeroed before decoding into them. Defaults to false. +func (t *StructCodecOptions) SetDecodeDeepZeroInline(b bool) *StructCodecOptions { + t.DecodeDeepZeroInline = &b + return t +} + +// SetEncodeOmitDefaultStruct specifies if default structs should be considered empty by omitempty. A default struct has all +// its values set to their default value. Defaults to false. +func (t *StructCodecOptions) SetEncodeOmitDefaultStruct(b bool) *StructCodecOptions { + t.EncodeOmitDefaultStruct = &b + return t +} + +// SetOverwriteDuplicatedInlinedFields specifies if inlined struct fields can be overwritten by higher level struct fields with the +// same bson key. When true and decoding, values will be written to the outermost struct with a matching key, and when +// encoding, keys will have the value of the top-most matching field. When false, decoding and encoding will error if +// there are duplicate keys after the struct is inlined. Defaults to true. +func (t *StructCodecOptions) SetOverwriteDuplicatedInlinedFields(b bool) *StructCodecOptions { + t.OverwriteDuplicatedInlinedFields = &b + return t +} + +// SetAllowUnexportedFields specifies if unexported fields should be marshaled/unmarshaled. Defaults to false. +func (t *StructCodecOptions) SetAllowUnexportedFields(b bool) *StructCodecOptions { + t.AllowUnexportedFields = &b + return t +} + +// MergeStructCodecOptions combines the given *StructCodecOptions into a single *StructCodecOptions in a last one wins fashion. +func MergeStructCodecOptions(opts ...*StructCodecOptions) *StructCodecOptions { + s := &StructCodecOptions{ + OverwriteDuplicatedInlinedFields: &defaultOverwriteDuplicatedInlinedFields, + } + for _, opt := range opts { + if opt == nil { + continue + } + + if opt.DecodeZeroStruct != nil { + s.DecodeZeroStruct = opt.DecodeZeroStruct + } + if opt.DecodeDeepZeroInline != nil { + s.DecodeDeepZeroInline = opt.DecodeDeepZeroInline + } + if opt.EncodeOmitDefaultStruct != nil { + s.EncodeOmitDefaultStruct = opt.EncodeOmitDefaultStruct + } + if opt.OverwriteDuplicatedInlinedFields != nil { + s.OverwriteDuplicatedInlinedFields = opt.OverwriteDuplicatedInlinedFields + } + if opt.AllowUnexportedFields != nil { + s.AllowUnexportedFields = opt.AllowUnexportedFields + } + } + + return s +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go new file mode 100644 index 0000000..13496d1 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go @@ -0,0 +1,38 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// TimeCodecOptions represents all possible options for time.Time encoding and decoding. +type TimeCodecOptions struct { + UseLocalTimeZone *bool // Specifies if we should decode into the local time zone. Defaults to false. 
+} + +// TimeCodec creates a new *TimeCodecOptions +func TimeCodec() *TimeCodecOptions { + return &TimeCodecOptions{} +} + +// SetUseLocalTimeZone specifies if we should decode into the local time zone. Defaults to false. +func (t *TimeCodecOptions) SetUseLocalTimeZone(b bool) *TimeCodecOptions { + t.UseLocalTimeZone = &b + return t +} + +// MergeTimeCodecOptions combines the given *TimeCodecOptions into a single *TimeCodecOptions in a last one wins fashion. +func MergeTimeCodecOptions(opts ...*TimeCodecOptions) *TimeCodecOptions { + t := TimeCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.UseLocalTimeZone != nil { + t.UseLocalTimeZone = opt.UseLocalTimeZone + } + } + + return t +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go new file mode 100644 index 0000000..e08b7f1 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go @@ -0,0 +1,38 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonoptions + +// UIntCodecOptions represents all possible options for uint encoding and decoding. +type UIntCodecOptions struct { + EncodeToMinSize *bool // Specifies if all uints except uint64 should be decoded to minimum size bsontype. Defaults to false. +} + +// UIntCodec creates a new *UIntCodecOptions +func UIntCodec() *UIntCodecOptions { + return &UIntCodecOptions{} +} + +// SetEncodeToMinSize specifies if all uints except uint64 should be decoded to minimum size bsontype. Defaults to false. +func (u *UIntCodecOptions) SetEncodeToMinSize(b bool) *UIntCodecOptions { + u.EncodeToMinSize = &b + return u +} + +// MergeUIntCodecOptions combines the given *UIntCodecOptions into a single *UIntCodecOptions in a last one wins fashion. +func MergeUIntCodecOptions(opts ...*UIntCodecOptions) *UIntCodecOptions { + u := UIntCodec() + for _, opt := range opts { + if opt == nil { + continue + } + if opt.EncodeToMinSize != nil { + u.EncodeToMinSize = opt.EncodeToMinSize + } + } + + return u +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go new file mode 100644 index 0000000..5cdf646 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go @@ -0,0 +1,445 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonrw + +import ( + "fmt" + "io" + + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" +) + +// Copier is a type that allows copying between ValueReaders, ValueWriters, and +// []byte values. +type Copier struct{} + +// NewCopier creates a new copier with the given registry. If a nil registry is provided +// a default registry is used. +func NewCopier() Copier { + return Copier{} +} + +// CopyDocument handles copying a document from src to dst. 
+func CopyDocument(dst ValueWriter, src ValueReader) error { + return Copier{}.CopyDocument(dst, src) +} + +// CopyDocument handles copying one document from the src to the dst. +func (c Copier) CopyDocument(dst ValueWriter, src ValueReader) error { + dr, err := src.ReadDocument() + if err != nil { + return err + } + + dw, err := dst.WriteDocument() + if err != nil { + return err + } + + return c.copyDocumentCore(dw, dr) +} + +// CopyArrayFromBytes copies the values from a BSON array represented as a +// []byte to a ValueWriter. +func (c Copier) CopyArrayFromBytes(dst ValueWriter, src []byte) error { + aw, err := dst.WriteArray() + if err != nil { + return err + } + + err = c.CopyBytesToArrayWriter(aw, src) + if err != nil { + return err + } + + return aw.WriteArrayEnd() +} + +// CopyDocumentFromBytes copies the values from a BSON document represented as a +// []byte to a ValueWriter. +func (c Copier) CopyDocumentFromBytes(dst ValueWriter, src []byte) error { + dw, err := dst.WriteDocument() + if err != nil { + return err + } + + err = c.CopyBytesToDocumentWriter(dw, src) + if err != nil { + return err + } + + return dw.WriteDocumentEnd() +} + +type writeElementFn func(key string) (ValueWriter, error) + +// CopyBytesToArrayWriter copies the values from a BSON Array represented as a []byte to an +// ArrayWriter. +func (c Copier) CopyBytesToArrayWriter(dst ArrayWriter, src []byte) error { + wef := func(_ string) (ValueWriter, error) { + return dst.WriteArrayElement() + } + + return c.copyBytesToValueWriter(src, wef) +} + +// CopyBytesToDocumentWriter copies the values from a BSON document represented as a []byte to a +// DocumentWriter. +func (c Copier) CopyBytesToDocumentWriter(dst DocumentWriter, src []byte) error { + wef := func(key string) (ValueWriter, error) { + return dst.WriteDocumentElement(key) + } + + return c.copyBytesToValueWriter(src, wef) +} + +func (c Copier) copyBytesToValueWriter(src []byte, wef writeElementFn) error { + // TODO(skriptble): Create errors types here. Anything thats a tag should be a property. + length, rem, ok := bsoncore.ReadLength(src) + if !ok { + return fmt.Errorf("couldn't read length from src, not enough bytes. length=%d", len(src)) + } + if len(src) < int(length) { + return fmt.Errorf("length read exceeds number of bytes available. length=%d bytes=%d", len(src), length) + } + rem = rem[:length-4] + + var t bsontype.Type + var key string + var val bsoncore.Value + for { + t, rem, ok = bsoncore.ReadType(rem) + if !ok { + return io.EOF + } + if t == bsontype.Type(0) { + if len(rem) != 0 { + return fmt.Errorf("document end byte found before end of document. remaining bytes=%v", rem) + } + break + } + + key, rem, ok = bsoncore.ReadKey(rem) + if !ok { + return fmt.Errorf("invalid key found. remaining bytes=%v", rem) + } + + // write as either array element or document element using writeElementFn + vw, err := wef(key) + if err != nil { + return err + } + + val, rem, ok = bsoncore.ReadValue(rem, t) + if !ok { + return fmt.Errorf("not enough bytes available to read type. bytes=%d type=%s", len(rem), t) + } + err = c.CopyValueFromBytes(vw, t, val.Data) + if err != nil { + return err + } + } + return nil +} + +// CopyDocumentToBytes copies an entire document from the ValueReader and +// returns it as bytes. +func (c Copier) CopyDocumentToBytes(src ValueReader) ([]byte, error) { + return c.AppendDocumentBytes(nil, src) +} + +// AppendDocumentBytes functions the same as CopyDocumentToBytes, but will +// append the result to dst. 
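A round-trip sketch for the copier: read a marshalled document back through a ValueReader and copy it out as bytes (assumes bsonrw.NewBSONDocumentReader and the bson package from the same vendored driver):

    raw, _ := bson.Marshal(bson.M{"a": int32(1)})

    vr := bsonrw.NewBSONDocumentReader(raw)
    out, err := bsonrw.Copier{}.CopyDocumentToBytes(vr)
    if err != nil {
        // handle the error
    }
    _ = out // an equivalent BSON document, rebuilt element by element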
+func (c Copier) AppendDocumentBytes(dst []byte, src ValueReader) ([]byte, error) {
+	if br, ok := src.(BytesReader); ok {
+		_, dst, err := br.ReadValueBytes(dst)
+		return dst, err
+	}
+
+	vw := vwPool.Get().(*valueWriter)
+	defer vwPool.Put(vw)
+
+	vw.reset(dst)
+
+	err := c.CopyDocument(vw, src)
+	dst = vw.buf
+	return dst, err
+}
+
+// AppendArrayBytes copies an array from the ValueReader to dst.
+func (c Copier) AppendArrayBytes(dst []byte, src ValueReader) ([]byte, error) {
+	if br, ok := src.(BytesReader); ok {
+		_, dst, err := br.ReadValueBytes(dst)
+		return dst, err
+	}
+
+	vw := vwPool.Get().(*valueWriter)
+	defer vwPool.Put(vw)
+
+	vw.reset(dst)
+
+	err := c.copyArray(vw, src)
+	dst = vw.buf
+	return dst, err
+}
+
+// CopyValueFromBytes will write the value represented by t and src to dst.
+func (c Copier) CopyValueFromBytes(dst ValueWriter, t bsontype.Type, src []byte) error {
+	if wvb, ok := dst.(BytesWriter); ok {
+		return wvb.WriteValueBytes(t, src)
+	}
+
+	vr := vrPool.Get().(*valueReader)
+	defer vrPool.Put(vr)
+
+	vr.reset(src)
+	vr.pushElement(t)
+
+	return c.CopyValue(dst, vr)
+}
+
+// CopyValueToBytes copies a value from src and returns it as a bsontype.Type and a
+// []byte.
+func (c Copier) CopyValueToBytes(src ValueReader) (bsontype.Type, []byte, error) {
+	return c.AppendValueBytes(nil, src)
+}
+
+// AppendValueBytes functions the same as CopyValueToBytes, but will append the
+// result to dst.
+func (c Copier) AppendValueBytes(dst []byte, src ValueReader) (bsontype.Type, []byte, error) {
+	if br, ok := src.(BytesReader); ok {
+		return br.ReadValueBytes(dst)
+	}
+
+	vw := vwPool.Get().(*valueWriter)
+	defer vwPool.Put(vw)
+
+	start := len(dst)
+
+	vw.reset(dst)
+	vw.push(mElement)
+
+	err := c.CopyValue(vw, src)
+	if err != nil {
+		return 0, dst, err
+	}
+
+	return bsontype.Type(vw.buf[start]), vw.buf[start+2:], nil
+}
+
+// CopyValue will copy a single value from src to dst.
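Before CopyValue itself, a quick usage sketch of the byte-oriented helpers above. This is illustrative only and not part of the vendored diff; it assumes bson.Marshal and bsonrw.NewBSONDocumentReader behave as in the upstream driver.

package main

import (
	"bytes"
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
)

func main() {
	// Marshal a small document to raw BSON bytes to act as the source.
	src, err := bson.Marshal(bson.M{"name": "Montreal", "wof:id": int64(101736545)})
	if err != nil {
		log.Fatal(err)
	}

	// Wrap the bytes in a ValueReader and copy the document back out.
	// NewBSONDocumentReader is assumed from the upstream bsonrw package.
	vr := bsonrw.NewBSONDocumentReader(src)
	dst, err := bsonrw.NewCopier().CopyDocumentToBytes(vr)
	if err != nil {
		log.Fatal(err)
	}

	// The round trip should be byte-identical, since the Copier can take the
	// BytesReader fast path shown in AppendDocumentBytes above.
	fmt.Println(bytes.Equal(src, dst))
}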
+func (c Copier) CopyValue(dst ValueWriter, src ValueReader) error { + var err error + switch src.Type() { + case bsontype.Double: + var f64 float64 + f64, err = src.ReadDouble() + if err != nil { + break + } + err = dst.WriteDouble(f64) + case bsontype.String: + var str string + str, err = src.ReadString() + if err != nil { + return err + } + err = dst.WriteString(str) + case bsontype.EmbeddedDocument: + err = c.CopyDocument(dst, src) + case bsontype.Array: + err = c.copyArray(dst, src) + case bsontype.Binary: + var data []byte + var subtype byte + data, subtype, err = src.ReadBinary() + if err != nil { + break + } + err = dst.WriteBinaryWithSubtype(data, subtype) + case bsontype.Undefined: + err = src.ReadUndefined() + if err != nil { + break + } + err = dst.WriteUndefined() + case bsontype.ObjectID: + var oid primitive.ObjectID + oid, err = src.ReadObjectID() + if err != nil { + break + } + err = dst.WriteObjectID(oid) + case bsontype.Boolean: + var b bool + b, err = src.ReadBoolean() + if err != nil { + break + } + err = dst.WriteBoolean(b) + case bsontype.DateTime: + var dt int64 + dt, err = src.ReadDateTime() + if err != nil { + break + } + err = dst.WriteDateTime(dt) + case bsontype.Null: + err = src.ReadNull() + if err != nil { + break + } + err = dst.WriteNull() + case bsontype.Regex: + var pattern, options string + pattern, options, err = src.ReadRegex() + if err != nil { + break + } + err = dst.WriteRegex(pattern, options) + case bsontype.DBPointer: + var ns string + var pointer primitive.ObjectID + ns, pointer, err = src.ReadDBPointer() + if err != nil { + break + } + err = dst.WriteDBPointer(ns, pointer) + case bsontype.JavaScript: + var js string + js, err = src.ReadJavascript() + if err != nil { + break + } + err = dst.WriteJavascript(js) + case bsontype.Symbol: + var symbol string + symbol, err = src.ReadSymbol() + if err != nil { + break + } + err = dst.WriteSymbol(symbol) + case bsontype.CodeWithScope: + var code string + var srcScope DocumentReader + code, srcScope, err = src.ReadCodeWithScope() + if err != nil { + break + } + + var dstScope DocumentWriter + dstScope, err = dst.WriteCodeWithScope(code) + if err != nil { + break + } + err = c.copyDocumentCore(dstScope, srcScope) + case bsontype.Int32: + var i32 int32 + i32, err = src.ReadInt32() + if err != nil { + break + } + err = dst.WriteInt32(i32) + case bsontype.Timestamp: + var t, i uint32 + t, i, err = src.ReadTimestamp() + if err != nil { + break + } + err = dst.WriteTimestamp(t, i) + case bsontype.Int64: + var i64 int64 + i64, err = src.ReadInt64() + if err != nil { + break + } + err = dst.WriteInt64(i64) + case bsontype.Decimal128: + var d128 primitive.Decimal128 + d128, err = src.ReadDecimal128() + if err != nil { + break + } + err = dst.WriteDecimal128(d128) + case bsontype.MinKey: + err = src.ReadMinKey() + if err != nil { + break + } + err = dst.WriteMinKey() + case bsontype.MaxKey: + err = src.ReadMaxKey() + if err != nil { + break + } + err = dst.WriteMaxKey() + default: + err = fmt.Errorf("Cannot copy unknown BSON type %s", src.Type()) + } + + return err +} + +func (c Copier) copyArray(dst ValueWriter, src ValueReader) error { + ar, err := src.ReadArray() + if err != nil { + return err + } + + aw, err := dst.WriteArray() + if err != nil { + return err + } + + for { + vr, err := ar.ReadValue() + if err == ErrEOA { + break + } + if err != nil { + return err + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + err = c.CopyValue(vw, vr) + if err != nil { + return err + } + } + + 
return aw.WriteArrayEnd() +} + +func (c Copier) copyDocumentCore(dw DocumentWriter, dr DocumentReader) error { + for { + key, vr, err := dr.ReadElement() + if err == ErrEOD { + break + } + if err != nil { + return err + } + + vw, err := dw.WriteDocumentElement(key) + if err != nil { + return err + } + + err = c.CopyValue(vw, vr) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go new file mode 100644 index 0000000..750b0d2 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go @@ -0,0 +1,9 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package bsonrw contains abstractions for reading and writing +// BSON and BSON like types from sources. +package bsonrw // import "go.mongodb.org/mongo-driver/bson/bsonrw" diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go new file mode 100644 index 0000000..54c76bf --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go @@ -0,0 +1,806 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonrw + +import ( + "encoding/base64" + "encoding/hex" + "errors" + "fmt" + "io" + "strings" + + "go.mongodb.org/mongo-driver/bson/bsontype" +) + +const maxNestingDepth = 200 + +// ErrInvalidJSON indicates the JSON input is invalid +var ErrInvalidJSON = errors.New("invalid JSON input") + +type jsonParseState byte + +const ( + jpsStartState jsonParseState = iota + jpsSawBeginObject + jpsSawEndObject + jpsSawBeginArray + jpsSawEndArray + jpsSawColon + jpsSawComma + jpsSawKey + jpsSawValue + jpsDoneState + jpsInvalidState +) + +type jsonParseMode byte + +const ( + jpmInvalidMode jsonParseMode = iota + jpmObjectMode + jpmArrayMode +) + +type extJSONValue struct { + t bsontype.Type + v interface{} +} + +type extJSONObject struct { + keys []string + values []*extJSONValue +} + +type extJSONParser struct { + js *jsonScanner + s jsonParseState + m []jsonParseMode + k string + v *extJSONValue + + err error + canonical bool + depth int + maxDepth int + + emptyObject bool + relaxedUUID bool +} + +// newExtJSONParser returns a new extended JSON parser, ready to to begin +// parsing from the first character of the argued json input. It will not +// perform any read-ahead and will therefore not report any errors about +// malformed JSON at this point. 
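Before the constructor body below, a note on how this parser is normally reached: the top-level bson package drives it through UnmarshalExtJSON, whose canonical argument maps onto the canonical field above. A minimal sketch, not part of this diff, assuming the public bson.UnmarshalExtJSON helper from the upstream driver:

package main

import (
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
)

func main() {
	// Canonical extended JSON spells numbers out with $-prefixed wrapper keys.
	canonical := []byte(`{"id": {"$numberLong": "101736545"}, "name": "Montreal"}`)
	// Relaxed extended JSON accepts plain JSON literals for the same values.
	relaxed := []byte(`{"id": 101736545, "name": "Montreal"}`)

	var a, b bson.M
	if err := bson.UnmarshalExtJSON(canonical, true, &a); err != nil {
		log.Fatal(err)
	}
	if err := bson.UnmarshalExtJSON(relaxed, false, &b); err != nil {
		log.Fatal(err)
	}

	fmt.Println(a["name"], b["name"])
}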
+func newExtJSONParser(r io.Reader, canonical bool) *extJSONParser { + return &extJSONParser{ + js: &jsonScanner{r: r}, + s: jpsStartState, + m: []jsonParseMode{}, + canonical: canonical, + maxDepth: maxNestingDepth, + } +} + +// peekType examines the next value and returns its BSON Type +func (ejp *extJSONParser) peekType() (bsontype.Type, error) { + var t bsontype.Type + var err error + initialState := ejp.s + + ejp.advanceState() + switch ejp.s { + case jpsSawValue: + t = ejp.v.t + case jpsSawBeginArray: + t = bsontype.Array + case jpsInvalidState: + err = ejp.err + case jpsSawComma: + // in array mode, seeing a comma means we need to progress again to actually observe a type + if ejp.peekMode() == jpmArrayMode { + return ejp.peekType() + } + case jpsSawEndArray: + // this would only be a valid state if we were in array mode, so return end-of-array error + err = ErrEOA + case jpsSawBeginObject: + // peek key to determine type + ejp.advanceState() + switch ejp.s { + case jpsSawEndObject: // empty embedded document + t = bsontype.EmbeddedDocument + ejp.emptyObject = true + case jpsInvalidState: + err = ejp.err + case jpsSawKey: + if initialState == jpsStartState { + return bsontype.EmbeddedDocument, nil + } + t = wrapperKeyBSONType(ejp.k) + + // if $uuid is encountered, parse as binary subtype 4 + if ejp.k == "$uuid" { + ejp.relaxedUUID = true + t = bsontype.Binary + } + + switch t { + case bsontype.JavaScript: + // just saw $code, need to check for $scope at same level + _, err = ejp.readValue(bsontype.JavaScript) + if err != nil { + break + } + + switch ejp.s { + case jpsSawEndObject: // type is TypeJavaScript + case jpsSawComma: + ejp.advanceState() + + if ejp.s == jpsSawKey && ejp.k == "$scope" { + t = bsontype.CodeWithScope + } else { + err = fmt.Errorf("invalid extended JSON: unexpected key %s in CodeWithScope object", ejp.k) + } + case jpsInvalidState: + err = ejp.err + default: + err = ErrInvalidJSON + } + case bsontype.CodeWithScope: + err = errors.New("invalid extended JSON: code with $scope must contain $code before $scope") + } + } + } + + return t, err +} + +// readKey parses the next key and its type and returns them +func (ejp *extJSONParser) readKey() (string, bsontype.Type, error) { + if ejp.emptyObject { + ejp.emptyObject = false + return "", 0, ErrEOD + } + + // advance to key (or return with error) + switch ejp.s { + case jpsStartState: + ejp.advanceState() + if ejp.s == jpsSawBeginObject { + ejp.advanceState() + } + case jpsSawBeginObject: + ejp.advanceState() + case jpsSawValue, jpsSawEndObject, jpsSawEndArray: + ejp.advanceState() + switch ejp.s { + case jpsSawBeginObject, jpsSawComma: + ejp.advanceState() + case jpsSawEndObject: + return "", 0, ErrEOD + case jpsDoneState: + return "", 0, io.EOF + case jpsInvalidState: + return "", 0, ejp.err + default: + return "", 0, ErrInvalidJSON + } + case jpsSawKey: // do nothing (key was peeked before) + default: + return "", 0, invalidRequestError("key") + } + + // read key + var key string + + switch ejp.s { + case jpsSawKey: + key = ejp.k + case jpsSawEndObject: + return "", 0, ErrEOD + case jpsInvalidState: + return "", 0, ejp.err + default: + return "", 0, invalidRequestError("key") + } + + // check for colon + ejp.advanceState() + if err := ensureColon(ejp.s, key); err != nil { + return "", 0, err + } + + // peek at the value to determine type + t, err := ejp.peekType() + if err != nil { + return "", 0, err + } + + return key, t, nil +} + +// readValue returns the value corresponding to the Type returned by peekType 
+func (ejp *extJSONParser) readValue(t bsontype.Type) (*extJSONValue, error) { + if ejp.s == jpsInvalidState { + return nil, ejp.err + } + + var v *extJSONValue + + switch t { + case bsontype.Null, bsontype.Boolean, bsontype.String: + if ejp.s != jpsSawValue { + return nil, invalidRequestError(t.String()) + } + v = ejp.v + case bsontype.Int32, bsontype.Int64, bsontype.Double: + // relaxed version allows these to be literal number values + if ejp.s == jpsSawValue { + v = ejp.v + break + } + fallthrough + case bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID, bsontype.MinKey, bsontype.MaxKey, bsontype.Undefined: + switch ejp.s { + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read value + ejp.advanceState() + if ejp.s != jpsSawValue || !ejp.ensureExtValueType(t) { + return nil, invalidJSONErrorForType("value", t) + } + + v = ejp.v + + // read end object + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("} after value", t) + } + default: + return nil, invalidRequestError(t.String()) + } + case bsontype.Binary, bsontype.Regex, bsontype.Timestamp, bsontype.DBPointer: + if ejp.s != jpsSawKey { + return nil, invalidRequestError(t.String()) + } + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + ejp.advanceState() + if t == bsontype.Binary && ejp.s == jpsSawValue { + // convert relaxed $uuid format + if ejp.relaxedUUID { + defer func() { ejp.relaxedUUID = false }() + uuid, err := ejp.v.parseSymbol() + if err != nil { + return nil, err + } + + // RFC 4122 defines the length of a UUID as 36 and the hyphens in a UUID as appearing + // in the 8th, 13th, 18th, and 23rd characters. 
+ // + // See https://tools.ietf.org/html/rfc4122#section-3 + valid := len(uuid) == 36 && + string(uuid[8]) == "-" && + string(uuid[13]) == "-" && + string(uuid[18]) == "-" && + string(uuid[23]) == "-" + if !valid { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens") + } + + // remove hyphens + uuidNoHyphens := strings.Replace(uuid, "-", "", -1) + if len(uuidNoHyphens) != 32 { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens") + } + + // convert hex to bytes + bytes, err := hex.DecodeString(uuidNoHyphens) + if err != nil { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %v", err) + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("$uuid and value and then }", bsontype.Binary) + } + + base64 := &extJSONValue{ + t: bsontype.String, + v: base64.StdEncoding.EncodeToString(bytes), + } + subType := &extJSONValue{ + t: bsontype.String, + v: "04", + } + + v = &extJSONValue{ + t: bsontype.EmbeddedDocument, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{base64, subType}, + }, + } + + break + } + + // convert legacy $binary format + base64 := ejp.v + + ejp.advanceState() + if ejp.s != jpsSawComma { + return nil, invalidJSONErrorForType(",", bsontype.Binary) + } + + ejp.advanceState() + key, t, err := ejp.readKey() + if err != nil { + return nil, err + } + if key != "$type" { + return nil, invalidJSONErrorForType("$type", bsontype.Binary) + } + + subType, err := ejp.readValue(t) + if err != nil { + return nil, err + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("2 key-value pairs and then }", bsontype.Binary) + } + + v = &extJSONValue{ + t: bsontype.EmbeddedDocument, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{base64, subType}, + }, + } + break + } + + // read KV pairs + if ejp.s != jpsSawBeginObject { + return nil, invalidJSONErrorForType("{", t) + } + + keys, vals, err := ejp.readObject(2, true) + if err != nil { + return nil, err + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("2 key-value pairs and then }", t) + } + + v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}} + + case bsontype.DateTime: + switch ejp.s { + case jpsSawValue: + v = ejp.v + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + ejp.advanceState() + switch ejp.s { + case jpsSawBeginObject: + keys, vals, err := ejp.readObject(1, true) + if err != nil { + return nil, err + } + v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}} + case jpsSawValue: + if ejp.canonical { + return nil, invalidJSONError("{") + } + v = ejp.v + default: + if ejp.canonical { + return nil, invalidJSONErrorForType("object", t) + } + return nil, invalidJSONErrorForType("ISO-8601 Internet Date/Time Format as described in RFC-3339", t) + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("value and then }", t) + } + default: + return nil, invalidRequestError(t.String()) + } + case bsontype.JavaScript: + switch ejp.s { + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read value + ejp.advanceState() + if ejp.s != 
jpsSawValue { + return nil, invalidJSONErrorForType("value", t) + } + v = ejp.v + + // read end object or comma and just return + ejp.advanceState() + case jpsSawEndObject: + v = ejp.v + default: + return nil, invalidRequestError(t.String()) + } + case bsontype.CodeWithScope: + if ejp.s == jpsSawKey && ejp.k == "$scope" { + v = ejp.v // this is the $code string from earlier + + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read { + ejp.advanceState() + if ejp.s != jpsSawBeginObject { + return nil, invalidJSONError("$scope to be embedded document") + } + } else { + return nil, invalidRequestError(t.String()) + } + case bsontype.EmbeddedDocument, bsontype.Array: + return nil, invalidRequestError(t.String()) + } + + return v, nil +} + +// readObject is a utility method for reading full objects of known (or expected) size +// it is useful for extended JSON types such as binary, datetime, regex, and timestamp +func (ejp *extJSONParser) readObject(numKeys int, started bool) ([]string, []*extJSONValue, error) { + keys := make([]string, numKeys) + vals := make([]*extJSONValue, numKeys) + + if !started { + ejp.advanceState() + if ejp.s != jpsSawBeginObject { + return nil, nil, invalidJSONError("{") + } + } + + for i := 0; i < numKeys; i++ { + key, t, err := ejp.readKey() + if err != nil { + return nil, nil, err + } + + switch ejp.s { + case jpsSawKey: + v, err := ejp.readValue(t) + if err != nil { + return nil, nil, err + } + + keys[i] = key + vals[i] = v + case jpsSawValue: + keys[i] = key + vals[i] = ejp.v + default: + return nil, nil, invalidJSONError("value") + } + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, nil, invalidJSONError("}") + } + + return keys, vals, nil +} + +// advanceState reads the next JSON token from the scanner and transitions +// from the current state based on that token's type +func (ejp *extJSONParser) advanceState() { + if ejp.s == jpsDoneState || ejp.s == jpsInvalidState { + return + } + + jt, err := ejp.js.nextToken() + + if err != nil { + ejp.err = err + ejp.s = jpsInvalidState + return + } + + valid := ejp.validateToken(jt.t) + if !valid { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + return + } + + switch jt.t { + case jttBeginObject: + ejp.s = jpsSawBeginObject + ejp.pushMode(jpmObjectMode) + ejp.depth++ + + if ejp.depth > ejp.maxDepth { + ejp.err = nestingDepthError(jt.p, ejp.depth) + ejp.s = jpsInvalidState + } + case jttEndObject: + ejp.s = jpsSawEndObject + ejp.depth-- + + if ejp.popMode() != jpmObjectMode { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttBeginArray: + ejp.s = jpsSawBeginArray + ejp.pushMode(jpmArrayMode) + case jttEndArray: + ejp.s = jpsSawEndArray + + if ejp.popMode() != jpmArrayMode { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttColon: + ejp.s = jpsSawColon + case jttComma: + ejp.s = jpsSawComma + case jttEOF: + ejp.s = jpsDoneState + if len(ejp.m) != 0 { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttString: + switch ejp.s { + case jpsSawComma: + if ejp.peekMode() == jpmArrayMode { + ejp.s = jpsSawValue + ejp.v = extendJSONToken(jt) + return + } + fallthrough + case jpsSawBeginObject: + ejp.s = jpsSawKey + ejp.k = jt.v.(string) + return + } + fallthrough + default: + ejp.s = jpsSawValue + ejp.v = extendJSONToken(jt) + } +} + +var jpsValidTransitionTokens = map[jsonParseState]map[jsonTokenType]bool{ + jpsStartState: { + jttBeginObject: 
true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + jttEOF: true, + }, + jpsSawBeginObject: { + jttEndObject: true, + jttString: true, + }, + jpsSawEndObject: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsSawBeginArray: { + jttBeginObject: true, + jttBeginArray: true, + jttEndArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawEndArray: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsSawColon: { + jttBeginObject: true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawComma: { + jttBeginObject: true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawKey: { + jttColon: true, + }, + jpsSawValue: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsDoneState: {}, + jpsInvalidState: {}, +} + +func (ejp *extJSONParser) validateToken(jtt jsonTokenType) bool { + switch ejp.s { + case jpsSawEndObject: + // if we are at depth zero and the next token is a '{', + // we can consider it valid only if we are not in array mode. + if jtt == jttBeginObject && ejp.depth == 0 { + return ejp.peekMode() != jpmArrayMode + } + case jpsSawComma: + switch ejp.peekMode() { + // the only valid next token after a comma inside a document is a string (a key) + case jpmObjectMode: + return jtt == jttString + case jpmInvalidMode: + return false + } + } + + _, ok := jpsValidTransitionTokens[ejp.s][jtt] + return ok +} + +// ensureExtValueType returns true if the current value has the expected +// value type for single-key extended JSON types. 
For example, +// {"$numberInt": v} v must be TypeString +func (ejp *extJSONParser) ensureExtValueType(t bsontype.Type) bool { + switch t { + case bsontype.MinKey, bsontype.MaxKey: + return ejp.v.t == bsontype.Int32 + case bsontype.Undefined: + return ejp.v.t == bsontype.Boolean + case bsontype.Int32, bsontype.Int64, bsontype.Double, bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID: + return ejp.v.t == bsontype.String + default: + return false + } +} + +func (ejp *extJSONParser) pushMode(m jsonParseMode) { + ejp.m = append(ejp.m, m) +} + +func (ejp *extJSONParser) popMode() jsonParseMode { + l := len(ejp.m) + if l == 0 { + return jpmInvalidMode + } + + m := ejp.m[l-1] + ejp.m = ejp.m[:l-1] + + return m +} + +func (ejp *extJSONParser) peekMode() jsonParseMode { + l := len(ejp.m) + if l == 0 { + return jpmInvalidMode + } + + return ejp.m[l-1] +} + +func extendJSONToken(jt *jsonToken) *extJSONValue { + var t bsontype.Type + + switch jt.t { + case jttInt32: + t = bsontype.Int32 + case jttInt64: + t = bsontype.Int64 + case jttDouble: + t = bsontype.Double + case jttString: + t = bsontype.String + case jttBool: + t = bsontype.Boolean + case jttNull: + t = bsontype.Null + default: + return nil + } + + return &extJSONValue{t: t, v: jt.v} +} + +func ensureColon(s jsonParseState, key string) error { + if s != jpsSawColon { + return fmt.Errorf("invalid JSON input: missing colon after key \"%s\"", key) + } + + return nil +} + +func invalidRequestError(s string) error { + return fmt.Errorf("invalid request to read %s", s) +} + +func invalidJSONError(expected string) error { + return fmt.Errorf("invalid JSON input; expected %s", expected) +} + +func invalidJSONErrorForType(expected string, t bsontype.Type) error { + return fmt.Errorf("invalid JSON input; expected %s for %s", expected, t) +} + +func unexpectedTokenError(jt *jsonToken) error { + switch jt.t { + case jttInt32, jttInt64, jttDouble: + return fmt.Errorf("invalid JSON input; unexpected number (%v) at position %d", jt.v, jt.p) + case jttString: + return fmt.Errorf("invalid JSON input; unexpected string (\"%v\") at position %d", jt.v, jt.p) + case jttBool: + return fmt.Errorf("invalid JSON input; unexpected boolean literal (%v) at position %d", jt.v, jt.p) + case jttNull: + return fmt.Errorf("invalid JSON input; unexpected null literal at position %d", jt.p) + case jttEOF: + return fmt.Errorf("invalid JSON input; unexpected end of input at position %d", jt.p) + default: + return fmt.Errorf("invalid JSON input; unexpected %c at position %d", jt.v.(byte), jt.p) + } +} + +func nestingDepthError(p, depth int) error { + return fmt.Errorf("invalid JSON input; nesting too deep (%d levels) at position %d", depth, p) +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go new file mode 100644 index 0000000..35832d7 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go @@ -0,0 +1,644 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bsonrw + +import ( + "fmt" + "io" + "sync" + + "go.mongodb.org/mongo-driver/bson/bsontype" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +// ExtJSONValueReaderPool is a pool for ValueReaders that read ExtJSON. 
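Before the pool and reader types below, a short sketch showing that this reader can also be used directly: NewExtJSONValueReader (defined later in this file) yields a ValueReader that the Copier from copier.go can drain into raw BSON bytes. Illustrative only, using just the exported identifiers added in this diff plus the standard library:

package main

import (
	"fmt"
	"log"
	"strings"

	"go.mongodb.org/mongo-driver/bson/bsonrw"
)

func main() {
	extjson := `{"name": "Montreal", "population": {"$numberInt": "1762949"}}`

	// Build a ValueReader over the extended JSON (canonical mode).
	vr, err := bsonrw.NewExtJSONValueReader(strings.NewReader(extjson), true)
	if err != nil {
		log.Fatal(err)
	}

	// Drain the reader into raw BSON bytes via the Copier from copier.go.
	raw, err := bsonrw.NewCopier().CopyDocumentToBytes(vr)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("decoded %d bytes of BSON\n", len(raw))
}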
+type ExtJSONValueReaderPool struct { + pool sync.Pool +} + +// NewExtJSONValueReaderPool instantiates a new ExtJSONValueReaderPool. +func NewExtJSONValueReaderPool() *ExtJSONValueReaderPool { + return &ExtJSONValueReaderPool{ + pool: sync.Pool{ + New: func() interface{} { + return new(extJSONValueReader) + }, + }, + } +} + +// Get retrieves a ValueReader from the pool and uses src as the underlying ExtJSON. +func (bvrp *ExtJSONValueReaderPool) Get(r io.Reader, canonical bool) (ValueReader, error) { + vr := bvrp.pool.Get().(*extJSONValueReader) + return vr.reset(r, canonical) +} + +// Put inserts a ValueReader into the pool. If the ValueReader is not a ExtJSON ValueReader nothing +// is inserted into the pool and ok will be false. +func (bvrp *ExtJSONValueReaderPool) Put(vr ValueReader) (ok bool) { + bvr, ok := vr.(*extJSONValueReader) + if !ok { + return false + } + + bvr, _ = bvr.reset(nil, false) + bvrp.pool.Put(bvr) + return true +} + +type ejvrState struct { + mode mode + vType bsontype.Type + depth int +} + +// extJSONValueReader is for reading extended JSON. +type extJSONValueReader struct { + p *extJSONParser + + stack []ejvrState + frame int +} + +// NewExtJSONValueReader creates a new ValueReader from a given io.Reader +// It will interpret the JSON of r as canonical or relaxed according to the +// given canonical flag +func NewExtJSONValueReader(r io.Reader, canonical bool) (ValueReader, error) { + return newExtJSONValueReader(r, canonical) +} + +func newExtJSONValueReader(r io.Reader, canonical bool) (*extJSONValueReader, error) { + ejvr := new(extJSONValueReader) + return ejvr.reset(r, canonical) +} + +func (ejvr *extJSONValueReader) reset(r io.Reader, canonical bool) (*extJSONValueReader, error) { + p := newExtJSONParser(r, canonical) + typ, err := p.peekType() + + if err != nil { + return nil, ErrInvalidJSON + } + + var m mode + switch typ { + case bsontype.EmbeddedDocument: + m = mTopLevel + case bsontype.Array: + m = mArray + default: + m = mValue + } + + stack := make([]ejvrState, 1, 5) + stack[0] = ejvrState{ + mode: m, + vType: typ, + } + return &extJSONValueReader{ + p: p, + stack: stack, + }, nil +} + +func (ejvr *extJSONValueReader) advanceFrame() { + if ejvr.frame+1 >= len(ejvr.stack) { // We need to grow the stack + length := len(ejvr.stack) + if length+1 >= cap(ejvr.stack) { + // double it + buf := make([]ejvrState, 2*cap(ejvr.stack)+1) + copy(buf, ejvr.stack) + ejvr.stack = buf + } + ejvr.stack = ejvr.stack[:length+1] + } + ejvr.frame++ + + // Clean the stack + ejvr.stack[ejvr.frame].mode = 0 + ejvr.stack[ejvr.frame].vType = 0 + ejvr.stack[ejvr.frame].depth = 0 +} + +func (ejvr *extJSONValueReader) pushDocument() { + ejvr.advanceFrame() + + ejvr.stack[ejvr.frame].mode = mDocument + ejvr.stack[ejvr.frame].depth = ejvr.p.depth +} + +func (ejvr *extJSONValueReader) pushCodeWithScope() { + ejvr.advanceFrame() + + ejvr.stack[ejvr.frame].mode = mCodeWithScope +} + +func (ejvr *extJSONValueReader) pushArray() { + ejvr.advanceFrame() + + ejvr.stack[ejvr.frame].mode = mArray +} + +func (ejvr *extJSONValueReader) push(m mode, t bsontype.Type) { + ejvr.advanceFrame() + + ejvr.stack[ejvr.frame].mode = m + ejvr.stack[ejvr.frame].vType = t +} + +func (ejvr *extJSONValueReader) pop() { + switch ejvr.stack[ejvr.frame].mode { + case mElement, mValue: + ejvr.frame-- + case mDocument, mArray, mCodeWithScope: + ejvr.frame -= 2 // we pop twice to jump over the vrElement: vrDocument -> vrElement -> vrDocument/TopLevel/etc... 
+ } +} + +func (ejvr *extJSONValueReader) skipObject() { + // read entire object until depth returns to 0 (last ending } or ] seen) + depth := 1 + for depth > 0 { + ejvr.p.advanceState() + + // If object is empty, raise depth and continue. When emptyObject is true, the + // parser has already read both the opening and closing brackets of an empty + // object ("{}"), so the next valid token will be part of the parent document, + // not part of the nested document. + // + // If there is a comma, there are remaining fields, emptyObject must be set back + // to false, and comma must be skipped with advanceState(). + if ejvr.p.emptyObject { + if ejvr.p.s == jpsSawComma { + ejvr.p.emptyObject = false + ejvr.p.advanceState() + } + depth-- + continue + } + + switch ejvr.p.s { + case jpsSawBeginObject, jpsSawBeginArray: + depth++ + case jpsSawEndObject, jpsSawEndArray: + depth-- + } + } +} + +func (ejvr *extJSONValueReader) invalidTransitionErr(destination mode, name string, modes []mode) error { + te := TransitionError{ + name: name, + current: ejvr.stack[ejvr.frame].mode, + destination: destination, + modes: modes, + action: "read", + } + if ejvr.frame != 0 { + te.parent = ejvr.stack[ejvr.frame-1].mode + } + return te +} + +func (ejvr *extJSONValueReader) typeError(t bsontype.Type) error { + return fmt.Errorf("positioned on %s, but attempted to read %s", ejvr.stack[ejvr.frame].vType, t) +} + +func (ejvr *extJSONValueReader) ensureElementValue(t bsontype.Type, destination mode, callerName string, addModes ...mode) error { + switch ejvr.stack[ejvr.frame].mode { + case mElement, mValue: + if ejvr.stack[ejvr.frame].vType != t { + return ejvr.typeError(t) + } + default: + modes := []mode{mElement, mValue} + if addModes != nil { + modes = append(modes, addModes...) 
+ } + return ejvr.invalidTransitionErr(destination, callerName, modes) + } + + return nil +} + +func (ejvr *extJSONValueReader) Type() bsontype.Type { + return ejvr.stack[ejvr.frame].vType +} + +func (ejvr *extJSONValueReader) Skip() error { + switch ejvr.stack[ejvr.frame].mode { + case mElement, mValue: + default: + return ejvr.invalidTransitionErr(0, "Skip", []mode{mElement, mValue}) + } + + defer ejvr.pop() + + t := ejvr.stack[ejvr.frame].vType + switch t { + case bsontype.Array, bsontype.EmbeddedDocument, bsontype.CodeWithScope: + // read entire array, doc or CodeWithScope + ejvr.skipObject() + default: + _, err := ejvr.p.readValue(t) + if err != nil { + return err + } + } + + return nil +} + +func (ejvr *extJSONValueReader) ReadArray() (ArrayReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel: // allow reading array from top level + case mArray: + return ejvr, nil + default: + if err := ejvr.ensureElementValue(bsontype.Array, mArray, "ReadArray", mTopLevel, mArray); err != nil { + return nil, err + } + } + + ejvr.pushArray() + + return ejvr, nil +} + +func (ejvr *extJSONValueReader) ReadBinary() (b []byte, btype byte, err error) { + if err := ejvr.ensureElementValue(bsontype.Binary, 0, "ReadBinary"); err != nil { + return nil, 0, err + } + + v, err := ejvr.p.readValue(bsontype.Binary) + if err != nil { + return nil, 0, err + } + + b, btype, err = v.parseBinary() + + ejvr.pop() + return b, btype, err +} + +func (ejvr *extJSONValueReader) ReadBoolean() (bool, error) { + if err := ejvr.ensureElementValue(bsontype.Boolean, 0, "ReadBoolean"); err != nil { + return false, err + } + + v, err := ejvr.p.readValue(bsontype.Boolean) + if err != nil { + return false, err + } + + if v.t != bsontype.Boolean { + return false, fmt.Errorf("expected type bool, but got type %s", v.t) + } + + ejvr.pop() + return v.v.(bool), nil +} + +func (ejvr *extJSONValueReader) ReadDocument() (DocumentReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel: + return ejvr, nil + case mElement, mValue: + if ejvr.stack[ejvr.frame].vType != bsontype.EmbeddedDocument { + return nil, ejvr.typeError(bsontype.EmbeddedDocument) + } + + ejvr.pushDocument() + return ejvr, nil + default: + return nil, ejvr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue}) + } +} + +func (ejvr *extJSONValueReader) ReadCodeWithScope() (code string, dr DocumentReader, err error) { + if err = ejvr.ensureElementValue(bsontype.CodeWithScope, 0, "ReadCodeWithScope"); err != nil { + return "", nil, err + } + + v, err := ejvr.p.readValue(bsontype.CodeWithScope) + if err != nil { + return "", nil, err + } + + code, err = v.parseJavascript() + + ejvr.pushCodeWithScope() + return code, ejvr, err +} + +func (ejvr *extJSONValueReader) ReadDBPointer() (ns string, oid primitive.ObjectID, err error) { + if err = ejvr.ensureElementValue(bsontype.DBPointer, 0, "ReadDBPointer"); err != nil { + return "", primitive.NilObjectID, err + } + + v, err := ejvr.p.readValue(bsontype.DBPointer) + if err != nil { + return "", primitive.NilObjectID, err + } + + ns, oid, err = v.parseDBPointer() + + ejvr.pop() + return ns, oid, err +} + +func (ejvr *extJSONValueReader) ReadDateTime() (int64, error) { + if err := ejvr.ensureElementValue(bsontype.DateTime, 0, "ReadDateTime"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(bsontype.DateTime) + if err != nil { + return 0, err + } + + d, err := v.parseDateTime() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) 
ReadDecimal128() (primitive.Decimal128, error) { + if err := ejvr.ensureElementValue(bsontype.Decimal128, 0, "ReadDecimal128"); err != nil { + return primitive.Decimal128{}, err + } + + v, err := ejvr.p.readValue(bsontype.Decimal128) + if err != nil { + return primitive.Decimal128{}, err + } + + d, err := v.parseDecimal128() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) ReadDouble() (float64, error) { + if err := ejvr.ensureElementValue(bsontype.Double, 0, "ReadDouble"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(bsontype.Double) + if err != nil { + return 0, err + } + + d, err := v.parseDouble() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) ReadInt32() (int32, error) { + if err := ejvr.ensureElementValue(bsontype.Int32, 0, "ReadInt32"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(bsontype.Int32) + if err != nil { + return 0, err + } + + i, err := v.parseInt32() + + ejvr.pop() + return i, err +} + +func (ejvr *extJSONValueReader) ReadInt64() (int64, error) { + if err := ejvr.ensureElementValue(bsontype.Int64, 0, "ReadInt64"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(bsontype.Int64) + if err != nil { + return 0, err + } + + i, err := v.parseInt64() + + ejvr.pop() + return i, err +} + +func (ejvr *extJSONValueReader) ReadJavascript() (code string, err error) { + if err = ejvr.ensureElementValue(bsontype.JavaScript, 0, "ReadJavascript"); err != nil { + return "", err + } + + v, err := ejvr.p.readValue(bsontype.JavaScript) + if err != nil { + return "", err + } + + code, err = v.parseJavascript() + + ejvr.pop() + return code, err +} + +func (ejvr *extJSONValueReader) ReadMaxKey() error { + if err := ejvr.ensureElementValue(bsontype.MaxKey, 0, "ReadMaxKey"); err != nil { + return err + } + + v, err := ejvr.p.readValue(bsontype.MaxKey) + if err != nil { + return err + } + + err = v.parseMinMaxKey("max") + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadMinKey() error { + if err := ejvr.ensureElementValue(bsontype.MinKey, 0, "ReadMinKey"); err != nil { + return err + } + + v, err := ejvr.p.readValue(bsontype.MinKey) + if err != nil { + return err + } + + err = v.parseMinMaxKey("min") + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadNull() error { + if err := ejvr.ensureElementValue(bsontype.Null, 0, "ReadNull"); err != nil { + return err + } + + v, err := ejvr.p.readValue(bsontype.Null) + if err != nil { + return err + } + + if v.t != bsontype.Null { + return fmt.Errorf("expected type null but got type %s", v.t) + } + + ejvr.pop() + return nil +} + +func (ejvr *extJSONValueReader) ReadObjectID() (primitive.ObjectID, error) { + if err := ejvr.ensureElementValue(bsontype.ObjectID, 0, "ReadObjectID"); err != nil { + return primitive.ObjectID{}, err + } + + v, err := ejvr.p.readValue(bsontype.ObjectID) + if err != nil { + return primitive.ObjectID{}, err + } + + oid, err := v.parseObjectID() + + ejvr.pop() + return oid, err +} + +func (ejvr *extJSONValueReader) ReadRegex() (pattern string, options string, err error) { + if err = ejvr.ensureElementValue(bsontype.Regex, 0, "ReadRegex"); err != nil { + return "", "", err + } + + v, err := ejvr.p.readValue(bsontype.Regex) + if err != nil { + return "", "", err + } + + pattern, options, err = v.parseRegex() + + ejvr.pop() + return pattern, options, err +} + +func (ejvr *extJSONValueReader) ReadString() (string, error) { + if err := ejvr.ensureElementValue(bsontype.String, 0, "ReadString"); err != nil { + 
return "", err + } + + v, err := ejvr.p.readValue(bsontype.String) + if err != nil { + return "", err + } + + if v.t != bsontype.String { + return "", fmt.Errorf("expected type string but got type %s", v.t) + } + + ejvr.pop() + return v.v.(string), nil +} + +func (ejvr *extJSONValueReader) ReadSymbol() (symbol string, err error) { + if err = ejvr.ensureElementValue(bsontype.Symbol, 0, "ReadSymbol"); err != nil { + return "", err + } + + v, err := ejvr.p.readValue(bsontype.Symbol) + if err != nil { + return "", err + } + + symbol, err = v.parseSymbol() + + ejvr.pop() + return symbol, err +} + +func (ejvr *extJSONValueReader) ReadTimestamp() (t uint32, i uint32, err error) { + if err = ejvr.ensureElementValue(bsontype.Timestamp, 0, "ReadTimestamp"); err != nil { + return 0, 0, err + } + + v, err := ejvr.p.readValue(bsontype.Timestamp) + if err != nil { + return 0, 0, err + } + + t, i, err = v.parseTimestamp() + + ejvr.pop() + return t, i, err +} + +func (ejvr *extJSONValueReader) ReadUndefined() error { + if err := ejvr.ensureElementValue(bsontype.Undefined, 0, "ReadUndefined"); err != nil { + return err + } + + v, err := ejvr.p.readValue(bsontype.Undefined) + if err != nil { + return err + } + + err = v.parseUndefined() + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadElement() (string, ValueReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel, mDocument, mCodeWithScope: + default: + return "", nil, ejvr.invalidTransitionErr(mElement, "ReadElement", []mode{mTopLevel, mDocument, mCodeWithScope}) + } + + name, t, err := ejvr.p.readKey() + + if err != nil { + if err == ErrEOD { + if ejvr.stack[ejvr.frame].mode == mCodeWithScope { + _, err := ejvr.p.peekType() + if err != nil { + return "", nil, err + } + } + + ejvr.pop() + } + + return "", nil, err + } + + ejvr.push(mElement, t) + return name, ejvr, nil +} + +func (ejvr *extJSONValueReader) ReadValue() (ValueReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mArray: + default: + return nil, ejvr.invalidTransitionErr(mValue, "ReadValue", []mode{mArray}) + } + + t, err := ejvr.p.peekType() + if err != nil { + if err == ErrEOA { + ejvr.pop() + } + + return nil, err + } + + ejvr.push(mValue, t) + return ejvr, nil +} diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go new file mode 100644 index 0000000..ba39c96 --- /dev/null +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go @@ -0,0 +1,223 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// +// Based on github.com/golang/go by The Go Authors +// See THIRD-PARTY-NOTICES for original license terms. + +package bsonrw + +import "unicode/utf8" + +// safeSet holds the value true if the ASCII character with the given array +// position can be represented inside a JSON string without any further +// escaping. +// +// All values are true except for the ASCII control characters (0-31), the +// double quote ("), and the backslash character ("\"). 
+var safeSet = [utf8.RuneSelf]bool{ + ' ': true, + '!': true, + '"': false, + '#': true, + '$': true, + '%': true, + '&': true, + '\'': true, + '(': true, + ')': true, + '*': true, + '+': true, + ',': true, + '-': true, + '.': true, + '/': true, + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + ':': true, + ';': true, + '<': true, + '=': true, + '>': true, + '?': true, + '@': true, + 'A': true, + 'B': true, + 'C': true, + 'D': true, + 'E': true, + 'F': true, + 'G': true, + 'H': true, + 'I': true, + 'J': true, + 'K': true, + 'L': true, + 'M': true, + 'N': true, + 'O': true, + 'P': true, + 'Q': true, + 'R': true, + 'S': true, + 'T': true, + 'U': true, + 'V': true, + 'W': true, + 'X': true, + 'Y': true, + 'Z': true, + '[': true, + '\\': false, + ']': true, + '^': true, + '_': true, + '`': true, + 'a': true, + 'b': true, + 'c': true, + 'd': true, + 'e': true, + 'f': true, + 'g': true, + 'h': true, + 'i': true, + 'j': true, + 'k': true, + 'l': true, + 'm': true, + 'n': true, + 'o': true, + 'p': true, + 'q': true, + 'r': true, + 's': true, + 't': true, + 'u': true, + 'v': true, + 'w': true, + 'x': true, + 'y': true, + 'z': true, + '{': true, + '|': true, + '}': true, + '~': true, + '\u007f': true, +} + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML