update go modules

Dmitry Verkhoturov 2024-04-10 23:48:47 +02:00 committed by Umputun
parent e0423b8683
commit 4bb0017060
136 changed files with 3780 additions and 2079 deletions


@ -6,21 +6,21 @@ require (
github.com/go-pkgz/jrpc v0.3.0
github.com/go-pkgz/lgr v0.11.1
github.com/jessevdk/go-flags v1.5.0
github.com/stretchr/testify v1.8.4
github.com/stretchr/testify v1.9.0
github.com/umputun/remark42/backend v1.1000.0
)
require (
github.com/Depado/bfchroma/v2 v2.0.0 // indirect
github.com/PuerkitoBio/goquery v1.8.1 // indirect
github.com/PuerkitoBio/goquery v1.9.1 // indirect
github.com/ajg/form v1.5.1 // indirect
github.com/alecthomas/chroma/v2 v2.12.0 // indirect
github.com/alecthomas/chroma/v2 v2.13.0 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/didip/tollbooth/v7 v7.0.1 // indirect
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f // indirect
github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/dlclark/regexp2 v1.11.0 // indirect
github.com/go-chi/chi/v5 v5.0.12 // indirect
github.com/go-chi/render v1.0.3 // indirect
github.com/go-pkgz/expirable-cache v1.0.0 // indirect
@ -32,10 +32,10 @@ require (
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rs/xid v1.5.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
go.etcd.io/bbolt v1.3.8 // indirect
go.etcd.io/bbolt v1.3.9 // indirect
golang.org/x/image v0.15.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sys v0.17.0 // indirect
golang.org/x/net v0.24.0 // indirect
golang.org/x/sys v0.19.0 // indirect
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)


@ -1,16 +1,15 @@
github.com/Depado/bfchroma/v2 v2.0.0 h1:IRpN9BPkNwEpR6w1ectIcNWOuhDSLx+8f1pn83fzxx8=
github.com/Depado/bfchroma/v2 v2.0.0/go.mod h1:wFwW/Pw8Tnd0irzgO9Zxtxgzp3aPS8qBWlyadxujxmw=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/PuerkitoBio/goquery v1.9.1 h1:mTL6XjbJTZdpfL+Gwl5U2h1l9yEkJjhmlTeV9VPW7UI=
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw=
github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw=
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU=
github.com/alecthomas/assert/v2 v2.6.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.13.0 h1:VP72+99Fb2zEcYM0MeaWJmV+xQvz5v5cxRHd+ooU1lI=
github.com/alecthomas/chroma/v2 v2.13.0/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
@ -23,8 +22,8 @@ github.com/didip/tollbooth/v7 v7.0.1 h1:TkT4sBKoQoHQFPf7blQ54iHrZiTDnr8TceU+MulV
github.com/didip/tollbooth/v7 v7.0.1/go.mod h1:VZhDSGl5bDSPj4wPsih3PFa4Uh9Ghv8hgacaTm5PRT4=
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f h1:jtKwihcLmUC9BAhoJ9adCUqdSSZcOdH2KL7mPTUm2aw=
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f/go.mod h1:q9C80dnsuVRP2dAskjnXRNWdUJqtGgwG9wNrzt0019s=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s=
github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
@ -71,11 +70,11 @@ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSS
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA=
go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI=
go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
@ -84,34 +83,32 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=


@ -4,14 +4,14 @@ go 1.21
require (
github.com/Depado/bfchroma/v2 v2.0.0
github.com/PuerkitoBio/goquery v1.8.1
github.com/alecthomas/chroma/v2 v2.12.0
github.com/PuerkitoBio/goquery v1.9.1
github.com/alecthomas/chroma/v2 v2.13.0
github.com/didip/tollbooth/v7 v7.0.1
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f
github.com/go-chi/chi/v5 v5.0.12
github.com/go-chi/cors v1.2.1
github.com/go-chi/render v1.0.3
github.com/go-pkgz/auth v1.22.2-0.20240117071454-f721b8c33b05
github.com/go-pkgz/auth v1.23.0
github.com/go-pkgz/jrpc v0.3.0
github.com/go-pkgz/lcw/v2 v2.0.0
github.com/go-pkgz/lgr v0.11.1
@ -29,35 +29,35 @@ require (
github.com/rs/xid v1.5.0
github.com/russross/blackfriday/v2 v2.1.0
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e
github.com/stretchr/testify v1.8.4
go.etcd.io/bbolt v1.3.8
github.com/stretchr/testify v1.9.0
go.etcd.io/bbolt v1.3.9
go.uber.org/goleak v1.3.0
golang.org/x/crypto v0.19.0
golang.org/x/crypto v0.22.0
golang.org/x/image v0.15.0
golang.org/x/net v0.21.0
golang.org/x/net v0.24.0
)
require (
cloud.google.com/go/compute v1.23.3 // indirect
cloud.google.com/go/compute v1.25.1 // indirect
cloud.google.com/go/compute/metadata v0.2.3 // indirect
github.com/ajg/form v1.5.1 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dghubble/oauth1 v0.7.2 // indirect
github.com/dghubble/oauth1 v0.7.3 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/dlclark/regexp2 v1.11.0 // indirect
github.com/go-oauth2/oauth2/v4 v4.5.2 // indirect
github.com/go-pkgz/email v0.5.0 // indirect
github.com/go-pkgz/expirable-cache v1.0.0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/websocket v1.5.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/klauspost/compress v1.17.4 // indirect
github.com/klauspost/compress v1.17.7 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/redis/go-redis/v9 v9.4.0 // indirect
@ -67,10 +67,10 @@ require (
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
go.mongodb.org/mongo-driver v1.13.1 // indirect
golang.org/x/oauth2 v0.17.0 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
golang.org/x/oauth2 v0.18.0 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/sys v0.17.0 // indirect
golang.org/x/sys v0.19.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/protobuf v1.33.0 // indirect


@ -1,27 +1,26 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk=
cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI=
cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU=
cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls=
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
github.com/Depado/bfchroma/v2 v2.0.0 h1:IRpN9BPkNwEpR6w1ectIcNWOuhDSLx+8f1pn83fzxx8=
github.com/Depado/bfchroma/v2 v2.0.0/go.mod h1:wFwW/Pw8Tnd0irzgO9Zxtxgzp3aPS8qBWlyadxujxmw=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/PuerkitoBio/goquery v1.9.1 h1:mTL6XjbJTZdpfL+Gwl5U2h1l9yEkJjhmlTeV9VPW7UI=
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw=
github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw=
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU=
github.com/alecthomas/assert/v2 v2.6.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.13.0 h1:VP72+99Fb2zEcYM0MeaWJmV+xQvz5v5cxRHd+ooU1lI=
github.com/alecthomas/chroma/v2 v2.13.0/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alicebob/gopher-json v0.0.0-20230218143504-906a9b012302 h1:uvdUDbHQHO85qeSydJtItA4T55Pw6BtAejd0APRJOCE=
github.com/alicebob/gopher-json v0.0.0-20230218143504-906a9b012302/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
github.com/alicebob/miniredis/v2 v2.31.1 h1:7XAt0uUg3DtwEKW5ZAGa+K7FZV2DdKQo5K/6TTnfX8Y=
github.com/alicebob/miniredis/v2 v2.31.1/go.mod h1:UB/T2Uztp7MlFSDakaX1sTXUv5CASoprx0wulRT6HBg=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
@ -35,8 +34,8 @@ github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dghubble/oauth1 v0.7.2 h1:pwcinOZy8z6XkNxvPmUDY52M7RDPxt0Xw1zgZ6Cl5JA=
github.com/dghubble/oauth1 v0.7.2/go.mod h1:9erQdIhqhOHG/7K9s/tgh9Ks/AfoyrO5mW/43Lu2+kE=
github.com/dghubble/oauth1 v0.7.3 h1:EkEM/zMDMp3zOsX2DC/ZQ2vnEX3ELK0/l9kb+vs4ptE=
github.com/dghubble/oauth1 v0.7.3/go.mod h1:oxTe+az9NSMIucDPDCCtzJGsPhciJV33xocHfcR2sVY=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/didip/tollbooth/v7 v7.0.0/go.mod h1:VZhDSGl5bDSPj4wPsih3PFa4Uh9Ghv8hgacaTm5PRT4=
@ -44,8 +43,8 @@ github.com/didip/tollbooth/v7 v7.0.1 h1:TkT4sBKoQoHQFPf7blQ54iHrZiTDnr8TceU+MulV
github.com/didip/tollbooth/v7 v7.0.1/go.mod h1:VZhDSGl5bDSPj4wPsih3PFa4Uh9Ghv8hgacaTm5PRT4=
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f h1:jtKwihcLmUC9BAhoJ9adCUqdSSZcOdH2KL7mPTUm2aw=
github.com/didip/tollbooth_chi v0.0.0-20220719025231-d662a7f6928f/go.mod h1:q9C80dnsuVRP2dAskjnXRNWdUJqtGgwG9wNrzt0019s=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
@ -63,8 +62,8 @@ github.com/go-chi/render v1.0.3 h1:AsXqd2a1/INaIfUSKq3G5uA8weYx20FOsM7uSoCyyt4=
github.com/go-chi/render v1.0.3/go.mod h1:/gr3hVkmYR0YlEy3LxCuVRFzEu9Ruok+gFqbIofjao0=
github.com/go-oauth2/oauth2/v4 v4.5.2 h1:CuZhD3lhGuI6aNLyUbRHXsgG2RwGRBOuCBfd4WQKqBQ=
github.com/go-oauth2/oauth2/v4 v4.5.2/go.mod h1:wk/2uLImWIa9VVQDgxz99H2GDbhmfi/9/Xr+GvkSUSQ=
github.com/go-pkgz/auth v1.22.2-0.20240117071454-f721b8c33b05 h1:oe9D1WzmKgG27i9PtDhxpDm+2n7KR431XHBGKLaQDLs=
github.com/go-pkgz/auth v1.22.2-0.20240117071454-f721b8c33b05/go.mod h1:/ceyBqZEjBsjP8evrd25LnNM2zMNqj0vc5PIKYJmi0U=
github.com/go-pkgz/auth v1.23.0 h1:jv/HaXKPL31p+SpEXYaGmROAEUWl5nWt/mQ31DCs7Y4=
github.com/go-pkgz/auth v1.23.0/go.mod h1:2j8ln0c2TbCuhVCvcqh8mZC7ap5r1brNXkbF/VHi0F4=
github.com/go-pkgz/email v0.5.0 h1:fdtMDGJ8NwyBACLR0LYHaCIK/OeUwZHMhH7Q0+oty9U=
github.com/go-pkgz/email v0.5.0/go.mod h1:BdxglsQnymzhfdbnncEE72a6DrucZHy6I+42LK2jLEc=
github.com/go-pkgz/expirable-cache v0.1.0/go.mod h1:GTrEl0X+q0mPNqN6dtcQXksACnzCBQ5k/k1SwXJsZKs=
@ -101,15 +100,13 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
@ -143,10 +140,9 @@ github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LF
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@ -160,7 +156,6 @@ github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58=
github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs=
@ -202,27 +197,34 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tidwall/btree v0.0.0-20191029221954-400434d76274 h1:G6Z6HvJuPjG6XfNGi/feOATzeJrfgTNJY+rGrHbA04E=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tidwall/btree v0.0.0-20191029221954-400434d76274/go.mod h1:huei1BkDWJ3/sLXmO+bsCNELL+Bp2Kks9OLyQFkzvA8=
github.com/tidwall/buntdb v1.1.2 h1:noCrqQXL9EKMtcdwJcmuVKSEjqu1ua99RHHgbLTEHRo=
github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI=
github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY=
github.com/tidwall/buntdb v1.1.2/go.mod h1:xAzi36Hir4FarpSHyfuZ6JzPJdjRZ8QlLZSntE2mqlI=
github.com/tidwall/buntdb v1.3.0 h1:gdhWO+/YwoB2qZMeAU9JcWWsHSYU3OvcieYgFRS0zwA=
github.com/tidwall/buntdb v1.3.0/go.mod h1:lZZrZUWzlyDJKlLQ6DKAy53LnG7m5kHyrEHvvcDmBpU=
github.com/tidwall/gjson v1.3.4/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls=
github.com/tidwall/gjson v1.12.1 h1:ikuZsLdhr8Ws0IdROXUS1Gi4v9Z4pGqpX/CvJkxvfpo=
github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/grect v0.0.0-20161006141115-ba9a043346eb h1:5NSYaAdrnblKByzd7XByQEJVT8+9v0W/tIY0Oo4OwrE=
github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U=
github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/grect v0.0.0-20161006141115-ba9a043346eb/go.mod h1:lKYYLFIr9OIgdgrtgkZ9zgRxRdvPYsExnYBsEAd8W5M=
github.com/tidwall/grect v0.1.4 h1:dA3oIgNgWdSspFzn1kS4S/RDpZFLrIxAZOdJKjYapOg=
github.com/tidwall/grect v0.1.4/go.mod h1:9FBsaYRaR0Tcy4UwefBX/UDcDcDy9V5jUcxHzv2jd5Q=
github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/rtree v0.0.0-20180113144539-6cd427091e0e h1:+NL1GDIUOKxVfbp2KoJQD9cTQ6dyP2co9q4yzmT9FZo=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/rtred v0.1.2 h1:exmoQtOLvDoO8ud++6LwVsAMTu0KPzLTUrMln8u1yu8=
github.com/tidwall/rtred v0.1.2/go.mod h1:hd69WNXQ5RP9vHd7dqekAz+RIdtfBogmglkZSRxCHFQ=
github.com/tidwall/rtree v0.0.0-20180113144539-6cd427091e0e/go.mod h1:/h+UnNGt0IhNNJLkGikcdcJqm66zGD/uJGMRxK/9+Ao=
github.com/tidwall/tinyqueue v0.0.0-20180302190814-1e39f5511563 h1:Otn9S136ELckZ3KKDyCkxapfufrqDqwmGjcHfAyXRrE=
github.com/tidwall/tinyqueue v0.0.0-20180302190814-1e39f5511563/go.mod h1:mLqSmt7Dv/CNneF2wfcChfN1rvapyQr01LGKnKex0DQ=
github.com/tidwall/tinyqueue v0.1.1 h1:SpNEvEggbpyN5DIReaJ2/1ndroY8iyEGxPYxoSaymYE=
github.com/tidwall/tinyqueue v0.1.1/go.mod h1:O/QNHwrnjqr6IHItYrzoHAKYhBkLI67Q096fQP5zMYw=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.34.0 h1:d3AAQJ2DRcxJYHm7OXNXtXt2as1vMDfxeIcFvhmGGm4=
@ -242,7 +244,6 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA=
@ -253,19 +254,18 @@ github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZ
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA=
go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI=
go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE=
go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
@ -279,18 +279,16 @@ golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ=
golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -318,8 +316,8 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=


@ -15,4 +15,4 @@
package internal
// Version is the current tagged release of the library.
const Version = "1.23.3"
const Version = "1.25.1"


@ -22,7 +22,7 @@ Syntax-wise, it is as close as possible to jQuery, with the same function names
## Installation
Please note that because of the net/html dependency, goquery requires Go1.1+ and is tested on Go1.7+.
Please note that starting with version `v1.9.0` of goquery, Go 1.18+ is required due to the use of generics. For previous goquery versions, a Go version of 1.1+ was required because of the `net/html` dependency. Ongoing goquery development is tested on the latest 2 versions of Go.
$ go get github.com/PuerkitoBio/goquery
@ -40,6 +40,8 @@ Please note that because of the net/html dependency, goquery requires Go1.1+ and
**Note that goquery's API is now stable, and will not break.**
* **2024-02-29 (v1.9.1)** : Improve allocation and performance of the `Map` function and `Selection.Map` method, better document the cascadia differences (thanks [@jwilsson](https://github.com/jwilsson)).
* **2024-02-22 (v1.9.0)** : Add a generic `Map` function, **goquery now requires Go version 1.18+** (thanks [@Fesaa](https://github.com/Fesaa)).
* **2023-02-18 (v1.8.1)** : Update `go.mod` dependencies, update CI workflow.
* **2021-10-25 (v1.8.0)** : Add `Render` function to render a `Selection` to an `io.Writer` (thanks [@anthonygedeon](https://github.com/anthonygedeon)).
* **2021-07-11 (v1.7.1)** : Update go.mod dependencies and add dependabot config (thanks [@jauderho](https://github.com/jauderho)).
@ -88,7 +90,7 @@ Utility functions that are not in jQuery but are useful in Go are implemented as
The complete [package reference documentation can be found here][doc].
Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Also, the selectors work more like the DOM's `querySelectorAll`, than jQuery's matchers - they have no concept of contextual matching (for some concrete examples of what that means, see [this ticket](https://github.com/andybalholm/cascadia/issues/61)). In practice, it doesn't matter very often but it's something worth mentioning. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
* `Find("~")` returns an empty selection because the selector string doesn't match anything.
* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
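To make the behaviour above concrete, here is a minimal sketch against goquery's public API (the HTML fragment and printed counts are purely illustrative):

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<ul><li>a</li><li>b</li></ul>`))
	if err != nil {
		log.Fatal(err)
	}

	items := doc.Find("li")
	fmt.Println(items.Length()) // 2

	// "~" is an invalid selector string: it compiles to a Matcher that never
	// matches, so Find returns an empty selection and Add leaves the original
	// selection's nodes unchanged.
	fmt.Println(doc.Find("~").Length())  // 0
	fmt.Println(items.Add("~").Length()) // 2
}
```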
@ -157,6 +159,7 @@ func main() {
- [stitcherd](https://github.com/vhodges/stitcherd), A server for doing server side includes using css selectors and DOM updates.
- [goskyr](https://github.com/jakopako/goskyr), an easily configurable command-line scraper written in Go.
- [goGetJS](https://github.com/davemolk/goGetJS), a tool for extracting, searching, and saving JavaScript files (with optional headless browser).
- [fitter](https://github.com/PxyUp/fitter), a tool for selecting values from JSON, XML, HTML and XPath formatted pages.
## Support
@ -165,7 +168,7 @@ There are a number of ways you can support the project:
* Use it, star it, build something with it, spread the word!
- If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
- Please search existing issues before opening a new one - it may have already been adressed.
- Please search existing issues before opening a new one - it may have already been addressed.
* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
- Make sure new code is tested.
- Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.


@ -31,8 +31,16 @@ func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
return Map(s, f)
}
// Map is the generic version of Selection.Map, allowing any type to be
// returned.
func Map[E any](s *Selection, f func(int, *Selection) E) (result []E) {
result = make([]E, len(s.Nodes))
for i, n := range s.Nodes {
result = append(result, f(i, newSingleSelection(n, s.document)))
result[i] = f(i, newSingleSelection(n, s.document))
}
return result
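As a usage sketch of the package-level generic `Map` added here, next to the unchanged `Selection.Map` method (the HTML fragment and variable names are illustrative):

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<p><a href="/a">A</a> <a href="/b">B</a></p>`))
	if err != nil {
		log.Fatal(err)
	}

	// The generic Map can produce a slice of any element type.
	hrefs := goquery.Map(doc.Find("a"), func(_ int, s *goquery.Selection) string {
		href, _ := s.Attr("href")
		return href
	})
	fmt.Println(hrefs) // [/a /b]

	// Selection.Map keeps its []string signature and now delegates to Map.
	fmt.Println(doc.Find("a").Map(func(_ int, s *goquery.Selection) string { return s.Text() })) // [A B]
}
```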


@ -20,6 +20,12 @@ const (
// Find gets the descendants of each element in the current set of matched
// elements, filtered by a selector. It returns a new Selection object
// containing these matched elements.
//
// Note that as for all methods accepting a selector string, the selector is
// compiled and applied by the cascadia package and inherits its behavior and
// constraints regarding supported selectors. See the note on cascadia in
// the goquery documentation here:
// https://github.com/PuerkitoBio/goquery?tab=readme-ov-file#api
func (s *Selection) Find(selector string) *Selection {
return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
}


@ -11,3 +11,7 @@ insert_final_newline = true
indent_style = space
indent_size = 2
insert_final_newline = false
[*.yml]
indent_style = space
indent_size = 2


@ -49,6 +49,8 @@ linters:
- nosnakecase
- testableexamples
- musttag
- depguard
- goconst
linters-settings:
govet:


@ -8,75 +8,72 @@ highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translators for Pygments lexers and styles.
<a id="markdown-table-of-contents" name="table-of-contents"></a>
## Table of Contents
<!-- TOC -->
1. [Table of Contents](#table-of-contents)
2. [Supported languages](#supported-languages)
3. [Try it](#try-it)
4. [Using the library](#using-the-library)
1. [Supported languages](#supported-languages)
2. [Try it](#try-it)
3. [Using the library](#using-the-library)
1. [Quick start](#quick-start)
2. [Identifying the language](#identifying-the-language)
3. [Formatting the output](#formatting-the-output)
4. [The HTML formatter](#the-html-formatter)
5. [More detail](#more-detail)
4. [More detail](#more-detail)
1. [Lexers](#lexers)
2. [Formatters](#formatters)
3. [Styles](#styles)
6. [Command-line interface](#command-line-interface)
7. [Testing lexers](#testing-lexers)
8. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
5. [Command-line interface](#command-line-interface)
6. [Testing lexers](#testing-lexers)
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
<!-- /TOC -->
<a id="markdown-supported-languages" name="supported-languages"></a>
## Supported languages
| Prefix | Language |
| :----: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
| D | D, Dart, Dax, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
| I | Idris, Igor, INI, Io, ISCdhcpd |
| J | J, Java, JavaScript, JSON, Julia, Jungle |
| K | Kotlin |
| L | Lighttpd configuration file, LLVM, Lua |
| M | Makefile, Mako, markdown, Mason, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix |
| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
| Q | QBasic, QML |
| R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust |
| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData |
| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
| W | WDTE, WebGPU Shading Language, Whiley |
| X | XML, Xorg |
| Y | YAML, YANG |
| Z | Z80 Assembly, Zed, Zig |
| Prefix | Language |
| :----: | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
| I | Idris, Igor, INI, Io, ISCdhcpd |
| J | J, Java, JavaScript, JSON, Julia, Jungle |
| K | Kotlin |
| L | Lighttpd configuration file, LLVM, Lua |
| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix |
| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
| Q | QBasic, QML |
| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust |
| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData |
| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
| W | WDTE, WebGPU Shading Language, Whiley |
| X | XML, Xorg |
| Y | YAML, YANG |
| Z | Z80 Assembly, Zed, Zig |
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
<a id="markdown-try-it" name="try-it"></a>
## Try it
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
<a id="markdown-using-the-library" name="using-the-library"></a>
## Using the library
This is version 2 of Chroma, use the import path:
```go
import "github.com/alecthomas/chroma/v2"
```
Chroma, like Pygments, has the concepts of
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
@ -95,8 +92,6 @@ In all cases, if a lexer, formatter or style can not be determined, `nil` will
be returned. In this situation you may want to default to the `Fallback`
value in each respective package, which provides sane defaults.
<a id="markdown-quick-start" name="quick-start"></a>
### Quick start
A convenience function exists that can be used to simply format some source
@ -106,8 +101,6 @@ text, without any effort:
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
```
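Wrapped into a complete program, the quick-start call might look like this (a sketch; the source string and the "monokai" style are arbitrary choices):

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	src := "package main\n\nfunc main() { println(\"hello\") }\n"

	// Lexer ("go"), formatter ("html") and style ("monokai") are all picked by name.
	if err := quick.Highlight(os.Stdout, src, "go", "html", "monokai"); err != nil {
		log.Fatal(err)
	}
}
```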
<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
### Identifying the language
To highlight code, you'll first have to identify what language the code is
@ -147,8 +140,6 @@ token types into a single token:
lexer = chroma.Coalesce(lexer)
```
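A typical fallback chain for picking a lexer, ending with the `Coalesce` call above, could be sketched as follows (the helper name `lexerFor` is illustrative):

```go
package example

import (
	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/lexers"
)

// lexerFor tries the file name, then content analysis, then the plaintext fallback.
func lexerFor(filename, contents string) chroma.Lexer {
	lexer := lexers.Match(filename)
	if lexer == nil {
		lexer = lexers.Analyse(contents)
	}
	if lexer == nil {
		lexer = lexers.Fallback
	}
	// Coalesce merges runs of identical token types into single tokens.
	return chroma.Coalesce(lexer)
}
```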
<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
### Formatting the output
Once a language is identified you will need to pick a formatter and a style (theme).
@ -177,8 +168,6 @@ And finally, format the tokens from the iterator:
err := formatter.Format(w, style, iterator)
```
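Continuing from a chosen lexer, picking a style and formatter and then formatting the token iterator could look like this (a sketch with minimal error handling; `highlightTo` is an illustrative name):

```go
package example

import (
	"io"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/formatters"
	"github.com/alecthomas/chroma/v2/styles"
)

func highlightTo(w io.Writer, lexer chroma.Lexer, source string) error {
	style := styles.Get("monokai")
	if style == nil {
		style = styles.Fallback
	}
	formatter := formatters.Get("html")
	if formatter == nil {
		formatter = formatters.Fallback
	}

	// Tokenise the source, then hand the resulting iterator to the formatter.
	iterator, err := lexer.Tokenise(nil, source)
	if err != nil {
		return err
	}
	return formatter.Format(w, style, iterator)
}
```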
<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
### The HTML formatter
By default the `html` registered formatter generates standalone HTML with
@ -203,12 +192,8 @@ formatter := html.New(html.WithClasses(true))
err := formatter.WriteCSS(w, style)
```
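With classes enabled, the stylesheet is written once and the generated HTML refers to it; a sketch (the buffer handling and "monokai" style are arbitrary):

```go
package example

import (
	"bytes"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/styles"
)

// classStylesheet returns the CSS matching a class-based HTML formatter.
func classStylesheet() (string, error) {
	formatter := html.New(html.WithClasses(true))

	var css bytes.Buffer
	if err := formatter.WriteCSS(&css, styles.Get("monokai")); err != nil {
		return "", err
	}
	// Serve this CSS alongside output produced by formatter.Format with the same style.
	return css.String(), nil
}
```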
<a id="markdown-more-detail" name="more-detail"></a>
## More detail
<a id="markdown-lexers" name="lexers"></a>
### Lexers
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
@ -228,8 +213,6 @@ python3 _tools/pygments2chroma_xml.py \
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a>
### Formatters
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
@ -237,8 +220,6 @@ Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour,
A `noop` formatter is included that outputs the token text only, and a `tokens`
formatter outputs raw tokens. The latter is useful for debugging lexers.
<a id="markdown-styles" name="styles"></a>
### Styles
Chroma styles are defined in XML. The style entries use the
@ -262,8 +243,6 @@ Also, token types in a style file are hierarchical. For instance, when `CommentS
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included.
@ -288,10 +267,6 @@ on under the hood for easy integration with [lesspipe shipping with
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
<a id="markdown-testing-lexers" name="testing-lexers"></a>
## Testing lexers
If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run:


@ -5,7 +5,9 @@ import (
"html"
"io"
"sort"
"strconv"
"strings"
"sync"
"github.com/alecthomas/chroma/v2"
)
@ -132,6 +134,7 @@ func New(options ...Option) *Formatter {
baseLineNumber: 1,
preWrapper: defaultPreWrapper,
}
f.styleCache = newStyleCache(f)
for _, option := range options {
option(f)
}
@ -188,6 +191,7 @@ var (
// Formatter that generates HTML.
type Formatter struct {
styleCache *styleCache
standalone bool
prefix string
Classes bool // Exported field to detect when classes are being used
@ -220,12 +224,7 @@ func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Ite
//
// OTOH we need to be super careful about correct escaping...
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
css := f.styleToCSS(style)
if !f.Classes {
for t, style := range css {
css[t] = compressStyle(style)
}
}
css := f.styleCache.get(style, true)
if f.standalone {
fmt.Fprint(w, "<html>\n")
if f.Classes {
@ -243,7 +242,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
wrapInTable := f.lineNumbers && f.lineNumbersInTable
lines := chroma.SplitTokensIntoLines(tokens)
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1))
highlightIndex := 0
if wrapInTable {
@ -251,7 +250,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
for index := range lines {
line := f.baseLineNumber + index
highlight, next := f.shouldHighlight(highlightIndex, line)
@ -273,7 +272,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
}
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
highlightIndex = 0
for index, tokens := range lines {
@ -323,7 +322,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprint(w, `</span>`) // End of Line
}
}
fmt.Fprintf(w, f.preWrapper.End(true))
fmt.Fprintf(w, "%s", f.preWrapper.End(true))
if wrapInTable {
fmt.Fprint(w, "</td></tr></table>\n")
@ -419,7 +418,7 @@ func (f *Formatter) tabWidthStyle() string {
// WriteCSS writes CSS style definitions (without any surrounding HTML).
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
css := f.styleToCSS(style)
css := f.styleCache.get(style, false)
// Special-case background as it is mapped to the outer ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
return err
@ -562,3 +561,63 @@ func compressStyle(s string) string {
}
return strings.Join(out, ";")
}
const styleCacheLimit = 32
type styleCacheEntry struct {
style *chroma.Style
compressed bool
cache map[chroma.TokenType]string
}
type styleCache struct {
mu sync.Mutex
// LRU cache of compiled (and possibly compressed) styles. This is a slice
// because the cache size is small, and a slice is sufficiently fast for
// small N.
cache []styleCacheEntry
f *Formatter
}
func newStyleCache(f *Formatter) *styleCache {
return &styleCache{f: f}
}
func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string {
l.mu.Lock()
defer l.mu.Unlock()
// Look for an existing entry.
for i := len(l.cache) - 1; i >= 0; i-- {
entry := l.cache[i]
if entry.style == style && entry.compressed == compress {
// Top of the cache, no need to adjust the order.
if i == len(l.cache)-1 {
return entry.cache
}
// Move this entry to the end of the LRU
copy(l.cache[i:], l.cache[i+1:])
l.cache[len(l.cache)-1] = entry
return entry.cache
}
}
// No entry, create one.
cached := l.f.styleToCSS(style)
if !l.f.Classes {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
if compress {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
// Evict the oldest entry.
if len(l.cache) >= styleCacheLimit {
l.cache = l.cache[0:copy(l.cache, l.cache[1:])]
}
l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress})
return cached
}
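Because the cache is created in `New` and keyed by the style pointer and the compress flag, reusing a single `Formatter` across renders lets later `Format` calls hit the cache instead of recompiling the style; a usage sketch (the snippet slice and the style/lexer names are illustrative):

```go
package example

import (
	"io"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

// renderAll highlights several Go snippets with one shared formatter, so the
// compiled CSS for the style is reused after the first render.
func renderAll(w io.Writer, snippets []string) error {
	formatter := html.New(html.WithLineNumbers(true))
	style := styles.Get("github")
	if style == nil {
		style = styles.Fallback
	}

	for _, src := range snippets {
		it, err := lexers.Get("go").Tokenise(nil, src)
		if err != nil {
			return err
		}
		if err := formatter.Format(w, style, it); err != nil {
			return err
		}
	}
	return nil
}
```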


@ -4,52 +4,82 @@ import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Matcher token stub for docs, or
// Named matcher: @name, or
// Path matcher: /foo, or
// Wildcard path matcher: *
// nolint: gosec
var caddyfileMatcherTokenRegexp = `(\[\<matcher\>\]|@[^\s]+|/[^\s]+|\*)`
// Comment at start of line, or
// Comment preceded by whitespace
var caddyfileCommentRegexp = `(^|\s+)#.*\n`
// caddyfileCommon are the rules common to both of the lexer variants
func caddyfileCommonRules() Rules {
return Rules{
"site_block_common": {
Include("site_body"),
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"site_body": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
{`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
{`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
{`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")},
// uri directive has special syntax
{`\b(uri)\b`, Keyword, Push("uri_directive")},
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Heredoc for CEL expression
Include("heredoc"),
// Backtick for CEL expression
{"`", StringBacktick, Push("backticks")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\s*\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Using double quotes doesn't stop at spaces
{`"`, StringDouble, Push("double_quotes")},
// Using backticks doesn't stop at spaces
{"`", StringBacktick, Push("backticks")},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
// Directives & matcher definitions
Include("site_body"),
// Any directive
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
// Using double quotes doesn't stop at spaces
{`"`, StringDouble, Push("double_quotes")},
// Using backticks doesn't stop at spaces
{"`", StringBacktick, Push("backticks")},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Directives & matcher definitions
Include("site_body"),
// Any other subdirective
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"not_matcher": {
@ -66,69 +96,97 @@ func caddyfileCommonRules() Rules {
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{caddyfileCommentRegexp, CommentSingle, Pop(2)},
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
{`\s*\n`, Text, Pop(2)},
Include("base"),
},
"nested_arguments": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
{caddyfileCommentRegexp, CommentSingle, Pop(2)},
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\s*\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
{caddyfileCommentRegexp, CommentSingle, Pop(3)},
{`\s*\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
"uri_directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
{caddyfileMatcherTokenRegexp, NameDecorator, nil},
{`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
"double_quotes": {
Include("placeholder"),
{`\\"`, StringDouble, nil},
{`[^"]`, StringDouble, nil},
{`"`, StringDouble, Pop(1)},
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
"backticks": {
Include("placeholder"),
{"\\\\`", StringBacktick, nil},
{"[^`]", StringBacktick, nil},
{"`", StringBacktick, Pop(1)},
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
"optional": {
// Docs syntax for showing optional parts with [ ]
{`\[`, Punctuation, Push("optional")},
Include("name_constants"),
{`\|`, Punctuation, nil},
{`[^\[\]\|]+`, String, nil},
{`\]`, Punctuation, Pop(1)},
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
"heredoc": {
{`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil},
},
"name_constants": {
{`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil},
},
"placeholder": {
// Placeholder with dots, colon for default value, brackets for args[0:]
{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil},
// Handle opening brackets with no matching closing one
{`\{[^\}\s]*\b`, String, nil},
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{caddyfileCommentRegexp, CommentSingle, nil},
{`\[\<matcher\>\]`, NameDecorator, nil},
Include("name_constants"),
Include("heredoc"),
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil},
{`\[`, Punctuation, Push("optional")},
{"`", StringBacktick, Push("backticks")},
{`"`, StringDouble, Push("double_quotes")},
Include("placeholder"),
{`[a-z-]+/[a-z-+]+`, String, nil},
{`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil},
{`[^\s\n#\{]+`, String, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
@ -149,27 +207,29 @@ var Caddyfile = Register(MustNewLexer(
func caddyfileRules() Rules {
return Rules{
"root": {
Include("comments"),
{caddyfileCommentRegexp, CommentSingle, nil},
// Global options block
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
// Top level import
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Snippets
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
{`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
// Site label
{`[^#{(\s,]+`, GenericHeading, Push("label")},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")},
{`\s+`, Text, nil},
},
"globals": {
{`\}`, Punctuation, Pop(1)},
{`[^\s#]+`, Keyword, Push("directive")},
// Global options are parsed as subdirectives (no matcher)
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"snippet": {
{`\}`, Punctuation, Pop(1)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Any directive
Include("site_body"),
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
@ -179,7 +239,7 @@ func caddyfileRules() Rules {
{`,\s*\n?`, Text, nil},
{` `, Text, nil},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
Include("placeholder"),
// Site label
{`[^#{(\s,]+`, GenericHeading, nil},
// Comment after non-block label (hack because comments end in \n)
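
One way to sanity-check the reworked matcher-token and comment handling is to tokenise a small Caddyfile through chroma. A rough sketch follows; the input snippet is made up.

```go
package main

import (
	"fmt"
	"log"

	"github.com/alecthomas/chroma/v2/lexers"
)

func main() {
	src := `example.com {
	@api path /api/*
	handle @api {
		respond 200
	}
}
`
	lexer := lexers.Get("caddyfile")
	if lexer == nil {
		log.Fatal("caddyfile lexer not registered")
	}
	it, err := lexer.Tokenise(nil, src)
	if err != nil {
		log.Fatal(err)
	}
	// Print each token type and value; the @api matcher should come out as NameDecorator.
	for _, tok := range it.Tokens() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}
```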

View File

@ -19,10 +19,10 @@
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="///[^\n\r]+">
<rule pattern="///[^\n\r]*">
<token type="CommentSpecial"/>
</rule>
<rule pattern="//[^\n\r]+">
<rule pattern="//[^\n\r]*">
<token type="CommentSingle"/>
</rule>
<rule pattern="/[*].*?[*]/">

View File

@ -49,7 +49,7 @@
<rule pattern="(true|false|null|_)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[_a-zA-Z]\w*">
<rule pattern="#?[_a-zA-Z$]\w*">
<token type="Name"/>
</rule>
</state>

View File

@ -0,0 +1,17 @@
<lexer>
<config>
<name>Desktop file</name>
<alias>desktop</alias>
<alias>desktop_entry</alias>
<filename>*.desktop</filename>
<mime_type>application/x-desktop</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="^[ \t]*\n"><token type="TextWhitespace"/></rule>
<rule pattern="^(#.*)(\n)"><bygroups><token type="CommentSingle"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="(\[[^\]\n]+\])(\n)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="([-A-Za-z0-9]+)(\[[^\] \t=]+\])?([ \t]*)(=)([ \t]*)([^\n]*)([ \t\n]*\n)"><bygroups><token type="NameAttribute"/><token type="NameNamespace"/><token type="TextWhitespace"/><token type="Operator"/><token type="TextWhitespace"/><token type="LiteralString"/><token type="TextWhitespace"/></bygroups></rule>
</state>
</rules>
</lexer>

View File

@ -86,7 +86,7 @@
<rule pattern="\\(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<token type="NameFunction"/>
</rule>
<rule pattern="(&lt;-|::|-&gt;|=&gt;|=)(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<rule pattern="(&lt;-|::|-&gt;|=&gt;|=|'([:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+))(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<token type="OperatorWord"/>
</rule>
<rule pattern=":[:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]*">

View File

@ -3,6 +3,7 @@
<name>JSON</name>
<alias>json</alias>
<filename>*.json</filename>
<filename>*.avsc</filename>
<mime_type>application/json</mime_type>
<dot_all>true</dot_all>
<not_multiline>true</not_multiline>

View File

@ -0,0 +1,155 @@
<lexer>
<config>
<name>Materialize SQL dialect</name>
<alias>materialize</alias>
<alias>mzsql</alias>
<mime_type>text/x-materializesql</mime_type>
<case_insensitive>true</case_insensitive>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="--.*\n?">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)">
<usingbygroup>
<sublexer_name_group>6</sublexer_name_group>
<code_group>12</code_group>
<emitters>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="Text"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(ACCESS|ACKS|ADD|ADDRESSES|AGGREGATE|ALL|ALTER|AND|ANY|ARN|ARRANGEMENT|ARRAY|AS|ASC|ASSERT|AT|AUCTION|AUTHORITY|AVAILABILITY|AVRO|AWS|BEGIN|BETWEEN|BIGINT|BILLED|BODY|BOOLEAN|BOTH|BPCHAR|BROKEN|BROKER|BROKERS|BY|BYTES|CARDINALITY|CASCADE|CASE|CAST|CERTIFICATE|CHAIN|CHAR|CHARACTER|CHARACTERISTICS|CHECK|CLIENT|CLOSE|CLUSTER|CLUSTERS|COALESCE|COLLATE|COLUMN|COLUMNS|COMMENT|COMMIT|COMMITTED|COMPACTION|COMPRESSION|COMPUTE|COMPUTECTL|CONFLUENT|CONNECTION|CONNECTIONS|CONSTRAINT|COPY|COUNT|COUNTER|CREATE|CREATECLUSTER|CREATEDB|CREATEROLE|CROSS|CSV|CURRENT|CURSOR|DATABASE|DATABASES|DATUMS|DAY|DAYS|DEALLOCATE|DEBEZIUM|DEBUG|DEBUGGING|DEC|DECIMAL|DECLARE|DECORRELATED|DEFAULT|DEFAULTS|DELETE|DELIMITED|DELIMITER|DESC|DETAILS|DISCARD|DISK|DISTINCT|DOC|DOT|DOUBLE|DROP|EFFORT|ELEMENT|ELSE|ENABLE|END|ENDPOINT|ENFORCED|ENVELOPE|ERROR|ESCAPE|EXCEPT|EXECUTE|EXISTS|EXPECTED|EXPLAIN|EXPOSE|EXTRACT|FACTOR|FALSE|FETCH|FIELDS|FILTER|FIRST|FLOAT|FOLLOWING|FOR|FOREIGN|FORMAT|FORWARD|FROM|FULL|FULLNAME|FUNCTION|GENERATOR|GRANT|GREATEST|GROUP|GROUPS|HAVING|HEADER|HEADERS|HOLD|HOST|HOUR|HOURS|ID|IDEMPOTENCE|IDLE|IF|IGNORE|ILIKE|IN|INCLUDE|INDEX|INDEXES|INFO|INHERIT|INLINE|INNER|INPUT|INSERT|INSPECT|INT|INTEGER|INTERNAL|INTERSECT|INTERVAL|INTO|INTROSPECTION|IS|ISNULL|ISOLATION|JOIN|JSON|KAFKA|KEY|KEYS|LAST|LATERAL|LATEST|LEADING|LEAST|LEFT|LEVEL|LIKE|LIMIT|LIST|LOAD|LOCAL|LOG|LOGICAL|LOGIN|MANAGED|MAP|MARKETING|MATERIALIZE|MATERIALIZED|MAX|MECHANISMS|MEMBERSHIP|MERGE|MESSAGE|METADATA|MINUTE|MINUTES|MODE|MONTH|MONTHS|MS|MUTUALLY|NAME|NAMES|NATURAL|NEXT|NO|NOCREATECLUSTER|NOCREATEDB|NOCREATEROLE|NOINHERIT|NOLOGIN|NONE|NOSUPERUSER|NOT|NOTICE|NULL|NULLIF|NULLS|OBJECTS|OF|OFFSET|ON|ONLY|OPERATOR|OPTIMIZED|OPTIMIZER|OPTIONS|OR|ORDER|ORDINALITY|OUTER|OVER|OWNED|OWNER|PARTITION|PASSWORD|PHYSICAL|PLAN|PLANS|PORT|POSITION|POSTGRES|PRECEDING|PRECISION|PREFIX|PREPARE|PRIMARY|PRIVATELINK|PRIVILEGES|PROGRESS|PROTOBUF|PROTOCOL|PUBLICATION|QUERY|QUOTE|RAISE|RANGE|RAW|READ|REAL|REASSIGN|RECURSION|RECURSIVE|REFERENCES|REFRESH|REGEX|REGION|REGISTRY|RENAME|REPEATABLE|REPLACE|REPLICA|REPLICAS|REPLICATION|RESET|RESPECT|RESTRICT|RETENTION|RETURN|RETURNING|REVOKE|RIGHT|ROLE|ROLES|ROLLBACK|ROTATE|ROW|ROWS|SASL|SCALE|SCHEMA|SCHEMAS|SCRIPT|SECOND|SECONDS|SECRET|SECRETS|SECURITY|SEED|SELECT|SEQUENCES|SERIALIZABLE|SERVICE|SESSION|SET|SHARD|SHOW|SINK|SINKS|SIZE|SMALLINT|SNAPSHOT|SOME|SOURCE|SOURCES|SSH|SSL|START|STDIN|STDOUT|STORAGE|STORAGECTL|STRATEGY|STRICT|STRING|SUBSCRIBE|SUBSOURCE|SUBSOURCES|SUBSTRING|SUPERUSER|SWAP|SYSTEM|TABLE|TABLES|TAIL|TEMP|TEMPORARY|TEST|TEXT|THEN|TICK|TIES|TIME|TIMELINE|TIMEOUT|TIMESTAMP|TIMESTAMPTZ|TO|TOKEN|TOPIC|TPCH|TRACE|TRAILING|TRANSACTION|TRIM|TRUE|TUNNEL|TYPE|TYPES|UNBOUNDED|UNCOMMITTED|UNION|UNIQUE|UNKNOWN|UP|UPDATE|UPSERT|URL|USAGE|USER|USERNAME|USERS|USING|VALIDATE|VALUE|VALUES|VARCHAR|VARYING|VIEW|VIEWS|WARNING|WEBHOOK|WHEN|WHERE|WINDOW|WIRE|WITH|WITHIN|WITHOUT|WORK|WORKERS|WRITE|YEAR|YEARS|ZONE|ZONES)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[+*/&lt;&gt;=~!@#%^&amp;|`?-]+">
<token type="Operator"/>
</rule>
<rule pattern="::">
<token type="Operator"/>
</rule>
<rule pattern="\$\d+">
<token type="NameVariable"/>
</rule>
<rule pattern="([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="((?:E|U&amp;)?)(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="((?:U&amp;)?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringName"/>
</bygroups>
<push state="quoted-ident"/>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)">
<usingbygroup>
<sublexer_name_group>12</sublexer_name_group>
<code_group>4</code_group>
<emitters>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="[a-z_]\w*">
<token type="Name"/>
</rule>
<rule pattern=":([&#39;&#34;]?)[a-z]\w*\b\1">
<token type="NameVariable"/>
</rule>
<rule pattern="[;:()\[\]{},.]">
<token type="Punctuation"/>
</rule>
</state>
<state name="multiline-comments">
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="\*/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[^/*]+">
<token type="CommentMultiline"/>
</rule>
<rule pattern="[/*]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="string">
<rule pattern="[^&#39;]+">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
</state>
<state name="quoted-ident">
<rule pattern="[^&#34;]+">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;&#34;">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringName"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

View File

@ -0,0 +1,123 @@
<lexer>
<config>
<name>NDISASM</name>
<alias>ndisasm</alias>
<mime_type>text/x-disasm</mime_type>
<case_insensitive>true</case_insensitive>
<priority>0.5</priority> <!-- Lower than NASM -->
</config>
<rules>
<state name="root">
<rule pattern="^[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="offset"/>
</rule>
</state>
<state name="offset">
<rule pattern="[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="assembly"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="punctuation">
<rule pattern="[,():\[\]]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[&amp;|^&lt;&gt;+*/%~-]+">
<token type="Operator"/>
</rule>
<rule pattern="[$]+">
<token type="KeywordConstant"/>
</rule>
<rule pattern="seg|wrt|strict">
<token type="OperatorWord"/>
</rule>
<rule pattern="byte|[dq]?word">
<token type="KeywordType"/>
</rule>
</state>
<state name="assembly">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*:">
<token type="NameLabel"/>
</rule>
<rule pattern="([a-z$._?][\w$.?#@~]*)(\s+)(equ)">
<bygroups>
<token type="NameConstant"/>
<token type="KeywordDeclaration"/>
<token type="KeywordDeclaration"/>
</bygroups>
<push state="instruction-args"/>
</rule>
<rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE">
<token type="Keyword"/>
<push state="instruction-args"/>
</rule>
<rule pattern="(?:res|d)[bwdqt]|times">
<token type="KeywordDeclaration"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameFunction"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="2"/>
</rule>
</state>
<state name="instruction-args">
<rule pattern="&#34;(\\&#34;|[^&#34;\n])*&#34;|&#39;(\\&#39;|[^&#39;\n])*&#39;|`(\\`|[^`\n])*`">
<token type="LiteralString"/>
</rule>
<rule pattern="(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-7]+q">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="[01]+b">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="[0-9]+\.e?[0-9]+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule>
<include state="punctuation"/>
</rule>
<rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameVariable"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="3"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\n">
<token type="Text"/>
<pop depth="2"/>
</rule>
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
<rule pattern=";.*">
<token type="CommentSingle"/>
</rule>
</state>
</rules>
</lexer>

View File

@ -0,0 +1,119 @@
<lexer>
<config>
<name>Promela</name>
<alias>promela</alias>
<filename>*.pml</filename>
<filename>*.prom</filename>
<filename>*.prm</filename>
<filename>*.promela</filename>
<filename>*.pr</filename>
<filename>*.pm</filename>
<mime_type>text/x-promela</mime_type>
</config>
<rules>
<state name="statements">
<rule pattern="(\[\]|&lt;&gt;|/\\|\\/)|(U|W|V)\b"><token type="Operator"/></rule>
<rule pattern="@"><token type="Punctuation"/></rule>
<rule pattern="(\.)([a-zA-Z_]\w*)"><bygroups><token type="Operator"/><token type="NameAttribute"/></bygroups></rule>
<rule><include state="keywords"/></rule>
<rule><include state="types"/></rule>
<rule pattern="([LuU]|u8)?(&quot;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralString"/></bygroups><push state="string"/></rule>
<rule pattern="([LuU]|u8)?(&#x27;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#x27;\n])(&#x27;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/></bygroups></rule>
<rule pattern="0[xX]([0-9a-fA-F](\&#x27;?[0-9a-fA-F])*\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*)[pP][+-]?[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*[lL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?(\d(\&#x27;?\d)*\.\d(\&#x27;?\d)*|\.\d(\&#x27;?\d)*|\d(\&#x27;?\d)*)[eE][+-]?\d(\&#x27;?\d)*[fFlL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?((\d(\&#x27;?\d)*\.(\d(\&#x27;?\d)*)?|\.\d(\&#x27;?\d)*)[fFlL]?)|(\d(\&#x27;?\d)*[fFlL])"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?0[xX][0-9a-fA-F](\&#x27;?[0-9a-fA-F])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberHex"/></rule>
<rule pattern="(-)?0[bB][01](\&#x27;?[01])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberBin"/></rule>
<rule pattern="(-)?0(\&#x27;?[0-7])+(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberOct"/></rule>
<rule pattern="(-)?\d(\&#x27;?\d)*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberInteger"/></rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]"><token type="Operator"/></rule>
<rule pattern="[()\[\],.]"><token type="Punctuation"/></rule>
<rule pattern="(true|false|NULL)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="Name"/></rule>
</state>
<state name="types">
<rule pattern="(bit|bool|byte|pid|short|int|unsigned)\b"><token type="KeywordType"/></rule>
</state>
<state name="keywords">
<rule pattern="(atomic|break|d_step|do|od|for|in|goto|if|fi|unless)\b"><token type="Keyword"/></rule>
<rule pattern="(assert|get_priority|printf|printm|set_priority)\b"><token type="NameFunction"/></rule>
<rule pattern="(c_code|c_decl|c_expr|c_state|c_track)\b"><token type="Keyword"/></rule>
<rule pattern="(_|_last|_nr_pr|_pid|_priority|else|np_|STDIN)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(empty|enabled|eval|full|len|nempty|nfull|pc_value)\b"><token type="NameFunction"/></rule>
<rule pattern="run\b"><token type="OperatorWord"/></rule>
<rule pattern="(active|chan|D_proctype|hidden|init|local|mtype|never|notrace|proctype|show|trace|typedef|xr|xs)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="(priority|provided)\b"><token type="Keyword"/></rule>
<rule pattern="(inline|ltl|select)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="skip\b"><token type="Keyword"/></rule>
</state>
<state name="whitespace">
<rule pattern="^#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
<rule pattern="^#"><token type="CommentPreproc"/><push state="macro"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="if0"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="macro"/></rule>
<rule pattern="(^[ \t]*)(?!(?:public|private|protected|default)\b)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+)(\s*)(:)(?!:)"><bygroups><token type="TextWhitespace"/><token type="NameLabel"/><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
<rule pattern="\n"><token type="TextWhitespace"/></rule>
<rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
<rule pattern="\\\n"><token type="Text"/></rule>
<rule pattern="//(?:.|(?&lt;=\\)\n)*\n"><token type="CommentSingle"/></rule>
<rule pattern="/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/"><token type="CommentMultiline"/></rule>
<rule pattern="/(\\\n)?[*][\w\W]*"><token type="CommentMultiline"/></rule>
</state>
<state name="root">
<rule><include state="whitespace"/></rule>
<rule><include state="keywords"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;{/&quot;\&#x27;]*)(\{)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups><push state="function"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;/&quot;\&#x27;]*)(;)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups></rule>
<rule><include state="types"/></rule>
<rule><push state="statement"/></rule>
</state>
<state name="statement">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern="\}"><token type="Punctuation"/></rule>
<rule pattern="[{;]"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="function">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern=";"><token type="Punctuation"/></rule>
<rule pattern="\{"><token type="Punctuation"/><push/></rule>
<rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralString"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule pattern="[^\\&quot;\n]+"><token type="LiteralString"/></rule>
<rule pattern="\\\n"><token type="LiteralString"/></rule>
<rule pattern="\\"><token type="LiteralString"/></rule>
</state>
<state name="macro">
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&quot;[^&quot;]+&quot;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&lt;[^&gt;]+&gt;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="[^/\n]+"><token type="CommentPreproc"/></rule>
<rule pattern="/[*](.|\n)*?[*]/"><token type="CommentMultiline"/></rule>
<rule pattern="//.*?\n"><token type="CommentSingle"/><pop depth="1"/></rule>
<rule pattern="/"><token type="CommentPreproc"/></rule>
<rule pattern="(?&lt;=\\)\n"><token type="CommentPreproc"/></rule>
<rule pattern="\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><push/></rule>
<rule pattern="^\s*#el(?:se|if).*\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern=".*?\n"><token type="Comment"/></rule>
</state>
<state name="classname">
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameClass"/><pop depth="1"/></rule>
<rule pattern="\s*(?=&gt;)"><token type="Text"/><pop depth="1"/></rule>
<rule><pop depth="1"/></rule>
</state>
<state name="case-value">
<rule pattern="(?&lt;!:)(:)(?!:)"><token type="Punctuation"/><pop depth="1"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameConstant"/></rule>
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
</state>
</rules>
</lexer>

View File

@ -0,0 +1,94 @@
<lexer>
<config>
<name>Rego</name>
<alias>rego</alias>
<filename>*.rego</filename>
</config>
<rules>
<state name="root">
<rule pattern="(package|import|as|not|with|default|else|some|in|if|contains)\b">
<token type="KeywordDeclaration"/>
</rule>
<!-- importing keywords should then show up as keywords -->
<rule pattern="(import)( future.keywords.)(\w+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
<token type="KeywordDeclaration"/>
</bygroups>
</rule>
<rule pattern="#[^\r\n]*">
<token type="Comment"/>
</rule>
<rule pattern="(FIXME|TODO|XXX)\b( .*)$">
<bygroups>
<token type="Error"/>
<token type="CommentSpecial"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+[Ee][-+]\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\.\d+([eE][+\-]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(0|[1-9][0-9]*)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;.*?&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\$/((?!/\$).)*/\$">
<token type="LiteralString"/>
</rule>
<rule pattern="/(\\\\|\\&#34;|[^/])*/">
<token type="LiteralString"/>
</rule>
<rule pattern="^(\w+)">
<token type="Name"/>
</rule>
<rule pattern="[a-z_-][\w-]*(?=\()">
<token type="NameFunction"/>
</rule>
<rule pattern="[\r\n\s]+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="(package|import)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="[=&lt;&gt;!+-/*&amp;|]">
<token type="Operator"/>
</rule>
<rule pattern=":=">
<token type="Operator"/>
</rule>
<rule pattern="[[\]{}():;]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
</state>
</rules>
</lexer>

View File

@ -0,0 +1,58 @@
<lexer>
<config>
<name>RPMSpec</name>
<alias>spec</alias>
<filename>*.spec</filename>
<mime_type>text/x-rpm-spec</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="#.*$"><token type="Comment"/></rule>
<rule><include state="basic"/></rule>
</state>
<state name="description">
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="changelog">
<rule pattern="\*.*$"><token type="GenericSubheading"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralStringDouble"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="."><token type="LiteralStringDouble"/></rule>
</state>
<state name="basic">
<rule><include state="macro"/></rule>
<rule pattern="(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$"><bygroups><token type="GenericHeading"/><token type="Punctuation"/><usingself state="root"/></bygroups></rule>
<rule pattern="^%description"><token type="NameDecorator"/><push state="description"/></rule>
<rule pattern="^%changelog"><token type="NameDecorator"/><push state="changelog"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups></rule>
<rule pattern="%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)"><token type="Keyword"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="&#x27;.*?&#x27;"><token type="LiteralStringSingle"/></rule>
<rule pattern="&quot;"><token type="LiteralStringDouble"/><push state="string"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="macro">
<rule pattern="%define.*$"><token type="CommentPreproc"/></rule>
<rule pattern="%\{\!\?.*%define.*\}"><token type="CommentPreproc"/></rule>
<rule pattern="(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$"><bygroups><token type="CommentPreproc"/><token type="Text"/></bygroups></rule>
</state>
<state name="interpol">
<rule pattern="%\{?__[a-z_]+\}?"><token type="NameFunction"/></rule>
<rule pattern="%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?"><token type="KeywordPseudo"/></rule>
<rule pattern="%\{\?\w+\}"><token type="NameVariable"/></rule>
<rule pattern="\$\{?RPM_[A-Z0-9_]+\}?"><token type="NameVariableGlobal"/></rule>
<rule pattern="%\{[a-zA-Z]\w+\}"><token type="KeywordConstant"/></rule>
</state>
</rules>
</lexer>

View File

@ -55,7 +55,7 @@ func goRules() Rules {
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
{`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil},
{`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil},
},
}
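
Adding `~` to the punctuation class presumably covers the tilde that introduces an underlying-type term in Go type constraints (Go 1.18 generics). A small input that exercises the token:

```go
// Package constraints shows the token the lexer change targets: the '~'
// that marks an underlying-type term in a type constraint.
package constraints

type Integer interface {
	~int | ~int32 | ~int64
}

func Sum[T Integer](xs []T) (total T) {
	for _, x := range xs {
		total += x
	}
	return total
}
```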

View File

@ -1,11 +1,18 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [
"config:recommended",
":semanticCommits",
":semanticCommitTypeAll(chore)",
":semanticCommitScope(deps)",
"group:allNonMajor",
"schedule:earlyMondays", // Run once a week.
],
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [
"config:recommended",
":semanticCommits",
":semanticCommitTypeAll(chore)",
":semanticCommitScope(deps)",
"group:allNonMajor",
"schedule:earlyMondays", // Run once a week.
],
packageRules: [
{
matchPackageNames: ["golangci-lint"],
matchManagers: ["hermit"],
enabled: false,
},
],
}

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#c6d0f5"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#51576d"/>
<entry type="LineHighlight" style="bg:#51576d"/>
<entry type="LineNumbersTable" style="#838ba7"/>
<entry type="LineNumbers" style="#838ba7"/>
<entry type="Keyword" style="#ca9ee6"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#4c4f69"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#bcc0cc"/>
<entry type="LineHighlight" style="bg:#bcc0cc"/>
<entry type="LineNumbersTable" style="#8c8fa1"/>
<entry type="LineNumbers" style="#8c8fa1"/>
<entry type="Keyword" style="#8839ef"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#cad3f5"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#494d64"/>
<entry type="LineHighlight" style="bg:#494d64"/>
<entry type="LineNumbersTable" style="#8087a2"/>
<entry type="LineNumbers" style="#8087a2"/>
<entry type="Keyword" style="#c6a0f6"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#cdd6f4"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#45475a"/>
<entry type="LineHighlight" style="bg:#45475a"/>
<entry type="LineNumbersTable" style="#7f849c"/>
<entry type="LineNumbers" style="#7f849c"/>
<entry type="Keyword" style="#cba6f7"/>

View File

@ -1,6 +1,6 @@
<style name="github-dark">
<entry type="Error" style="#f85149"/>
<entry type="LineHighlight" style="#6e7681"/>
<entry type="LineHighlight" style="bg:#6e7681"/>
<entry type="LineNumbers" style="#6e7681"/>
<entry type="Background" style="#e6edf3 bg:#0d1117"/>
<entry type="Keyword" style="#ff7b72"/>
@ -42,4 +42,4 @@
<entry type="GenericTraceback" style="#ff7b72"/>
<entry type="GenericUnderline" style="underline"/>
<entry type="TextWhitespace" style="#6e7681"/>
</style>
</style>

View File

@ -4,6 +4,12 @@ Notable changes between releases.
## Latest
## v0.7.3
* Percent encode special characters in HMAC-SHA1 secrets ([#72](https://github.com/dghubble/oauth1/pull/72))
* Strip whitespace from request token body ([#56](https://github.com/dghubble/oauth1/pull/56))
* Update Go module dependencies
## v0.7.2
* Update minimum Go version from v1.17 to v1.18 ([#66](https://github.com/dghubble/oauth1/pull/66))

View File

@ -1,4 +1,8 @@
# OAuth1 [![GoDoc](https://pkg.go.dev/badge/github.com/dghubble/oauth1.svg)](https://pkg.go.dev/github.com/dghubble/oauth1) [![Workflow](https://github.com/dghubble/oauth1/actions/workflows/test.yaml/badge.svg)](https://github.com/dghubble/oauth1/actions/workflows/test.yaml?query=branch%3Amain) [![Sponsors](https://img.shields.io/github/sponsors/dghubble?logo=github)](https://github.com/sponsors/dghubble) [![Mastodon](https://img.shields.io/badge/follow-news-6364ff?logo=mastodon)](https://fosstodon.org/@typhoon)
# OAuth1
[![GoDoc](https://pkg.go.dev/badge/github.com/dghubble/oauth1.svg)](https://pkg.go.dev/github.com/dghubble/oauth1)
[![Workflow](https://github.com/dghubble/oauth1/actions/workflows/test.yaml/badge.svg)](https://github.com/dghubble/oauth1/actions/workflows/test.yaml?query=branch%3Amain)
[![Sponsors](https://img.shields.io/github/sponsors/dghubble?logo=github)](https://github.com/sponsors/dghubble)
[![Mastodon](https://img.shields.io/badge/follow-news-6364ff?logo=mastodon)](https://fosstodon.org/@typhoon)
<img align="right" src="https://storage.googleapis.com/dghubble/oauth1.png">

View File

@ -7,6 +7,7 @@ import (
"io/ioutil"
"net/http"
"net/url"
"strings"
)
const (
@ -89,7 +90,7 @@ func (c *Config) RequestToken() (requestToken, requestSecret string, err error)
}
// ParseQuery to decode URL-encoded application/x-www-form-urlencoded body
values, err := url.ParseQuery(string(body))
values, err := url.ParseQuery(strings.TrimSpace(string(body)))
if err != nil {
return "", "", err
}
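
Trimming the body guards against providers that terminate the request-token response with a newline, which would otherwise end up inside the parsed secret. A stdlib-only illustration (values are made up):

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	body := "oauth_token=abc&oauth_token_secret=def\n" // some providers append a newline

	raw, _ := url.ParseQuery(body)
	trimmed, _ := url.ParseQuery(strings.TrimSpace(body))

	fmt.Printf("raw secret:     %q\n", raw.Get("oauth_token_secret"))     // "def\n"
	fmt.Printf("trimmed secret: %q\n", trimmed.Get("oauth_token_secret")) // "def"
}
```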

View File

@ -32,7 +32,7 @@ func (s *HMACSigner) Name() string {
}
func hmacSign(consumerSecret, tokenSecret, message string, algo func() hash.Hash) (string, error) {
signingKey := strings.Join([]string{consumerSecret, tokenSecret}, "&")
signingKey := strings.Join([]string{PercentEncode(consumerSecret), PercentEncode(tokenSecret)}, "&")
mac := hmac.New(algo, []byte(signingKey))
mac.Write([]byte(message))
signatureBytes := mac.Sum(nil)
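
Percent-encoding both secrets before joining them with `&` follows the RFC 5849 signing-key construction and matters once a secret contains reserved characters. A small illustration using the package's exported PercentEncode; the secrets are invented:

```go
package main

import (
	"fmt"

	"github.com/dghubble/oauth1"
)

func main() {
	consumerSecret := "consumer+secret"
	tokenSecret := "token&secret"

	// Old behaviour: raw concatenation lets '&' inside a secret shift the key boundary.
	oldKey := consumerSecret + "&" + tokenSecret
	// New behaviour: each part is percent-encoded first, per RFC 5849 §3.4.2.
	newKey := oauth1.PercentEncode(consumerSecret) + "&" + oauth1.PercentEncode(tokenSecret)

	fmt.Println(oldKey) // consumer+secret&token&secret
	fmt.Println(newKey) // consumer%2Bsecret&token%26secret
}
```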

View File

@ -18,8 +18,12 @@ import (
"github.com/dlclark/regexp2/syntax"
)
// Default timeout used when running regexp matches -- "forever"
var DefaultMatchTimeout = time.Duration(math.MaxInt64)
var (
// DefaultMatchTimeout used when running regexp matches -- "forever"
DefaultMatchTimeout = time.Duration(math.MaxInt64)
// DefaultUnmarshalOptions used when unmarshaling a regex from text
DefaultUnmarshalOptions = None
)
// Regexp is the representation of a compiled regular expression.
// A Regexp is safe for concurrent use by multiple goroutines.
@ -43,7 +47,7 @@ type Regexp struct {
code *syntax.Code // compiled program
// cache of machines for running regexp
muRun sync.Mutex
muRun *sync.Mutex
runner []*runner
}
@ -72,6 +76,7 @@ func Compile(expr string, opt RegexOptions) (*Regexp, error) {
capsize: code.Capsize,
code: code,
MatchTimeout: DefaultMatchTimeout,
muRun: &sync.Mutex{},
}, nil
}
@ -371,3 +376,20 @@ func (re *Regexp) GroupNumberFromName(name string) int {
return -1
}
// MarshalText implements [encoding.TextMarshaler]. The output
// matches that of calling the [Regexp.String] method.
func (re *Regexp) MarshalText() ([]byte, error) {
return []byte(re.String()), nil
}
// UnmarshalText implements [encoding.TextUnmarshaler] by calling
// [Compile] on the encoded value.
func (re *Regexp) UnmarshalText(text []byte) error {
newRE, err := Compile(string(text), DefaultUnmarshalOptions)
if err != nil {
return err
}
*re = *newRE
return nil
}
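
The new MarshalText/UnmarshalText methods (together with the muRun pointer change that makes `*re = *newRE` safe) let a compiled pattern round-trip through text-based encoders such as encoding/json, with the unmarshal side compiling under DefaultUnmarshalOptions. A rough sketch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/dlclark/regexp2"
)

type Rule struct {
	Pattern *regexp2.Regexp `json:"pattern"`
}

func main() {
	re := regexp2.MustCompile(`(?i)^hello\s+world$`, regexp2.None)

	out, _ := json.Marshal(Rule{Pattern: re}) // MarshalText emits the original pattern string
	fmt.Println(string(out))

	var back Rule
	_ = json.Unmarshal(out, &back) // UnmarshalText recompiles with DefaultUnmarshalOptions
	ok, _ := back.Pattern.MatchString("Hello   WORLD")
	fmt.Println(ok) // true
}
```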

View File

@ -80,4 +80,10 @@ issues:
- text: 'Deferring unsafe method "Close" on type "io.ReadCloser"'
linters:
- gosec
- linters:
- unparam
- unused
- revive
path: _test\.go$
text: "unused-parameter"
exclude-use-default: false

View File

@ -89,7 +89,7 @@ func (fs *LocalFS) Remove(avatar string) error {
// note: id includes .image suffix
func (fs *LocalFS) List() (ids []string, err error) {
err = filepath.Walk(fs.storePath,
func(path string, info os.FileInfo, err error) error {
func(_ string, info os.FileInfo, err error) error {
if err != nil {
return err
}

View File

@ -16,7 +16,7 @@ type Func func(format string, args ...interface{})
func (f Func) Logf(format string, args ...interface{}) { f(format, args...) }
// NoOp logger
var NoOp = Func(func(format string, args ...interface{}) {})
var NoOp = Func(func(string, ...interface{}) {})
// Std logger sends to std default logger directly
var Std = Func(func(format string, args ...interface{}) { log.Printf(format, args...) })

View File

@ -199,7 +199,7 @@ func NewMicrosoft(p Params) Oauth2Handler {
infoURL: "https://graph.microsoft.com/v1.0/me",
// non-beta doesn't provide photo for consumers yet
// see https://github.com/microsoftgraph/microsoft-graph-docs/issues/3990
mapUser: func(data UserData, b []byte) token.User {
mapUser: func(data UserData, _ []byte) token.User {
userInfo := token.User{
ID: "microsoft_" + token.HashID(sha1.New(), data.Value("id")),
Name: data.Value("displayName"),

View File

@ -3,7 +3,6 @@
before:
hooks:
- ./gen.sh
- go install mvdan.cc/garble@v0.10.1
builds:
-
@ -32,7 +31,6 @@ builds:
- mips64le
goarm:
- 7
gobinary: garble
-
id: "s2d"
binary: s2d
@ -59,7 +57,6 @@ builds:
- mips64le
goarm:
- 7
gobinary: garble
-
id: "s2sx"
binary: s2sx
@ -87,7 +84,6 @@ builds:
- mips64le
goarm:
- 7
gobinary: garble
archives:
-

View File

@ -16,6 +16,30 @@ This package provides various compression algorithms.
# changelog
* Feb 5th, 2024 - [1.17.6](https://github.com/klauspost/compress/releases/tag/v1.17.6)
* zstd: Fix incorrect repeat coding in best mode https://github.com/klauspost/compress/pull/923
* s2: Fix DecodeConcurrent deadlock on errors https://github.com/klauspost/compress/pull/925
* Jan 26th, 2024 - [v1.17.5](https://github.com/klauspost/compress/releases/tag/v1.17.5)
* flate: Fix reset with dictionary on custom window encodes https://github.com/klauspost/compress/pull/912
* zstd: Add Frame header encoding and stripping https://github.com/klauspost/compress/pull/908
* zstd: Limit better/best default window to 8MB https://github.com/klauspost/compress/pull/913
* zstd: Speed improvements by @greatroar in https://github.com/klauspost/compress/pull/896 https://github.com/klauspost/compress/pull/910
* s2: Fix callbacks for skippable blocks and disallow 0xfe (Padding) by @Jille in https://github.com/klauspost/compress/pull/916 https://github.com/klauspost/compress/pull/917
https://github.com/klauspost/compress/pull/919 https://github.com/klauspost/compress/pull/918
* Dec 1st, 2023 - [v1.17.4](https://github.com/klauspost/compress/releases/tag/v1.17.4)
* huff0: Speed up symbol counting by @greatroar in https://github.com/klauspost/compress/pull/887
* huff0: Remove byteReader by @greatroar in https://github.com/klauspost/compress/pull/886
* gzhttp: Allow overriding decompression on transport https://github.com/klauspost/compress/pull/892
* gzhttp: Clamp compression level https://github.com/klauspost/compress/pull/890
* gzip: Error out if reserved bits are set https://github.com/klauspost/compress/pull/891
* Nov 15th, 2023 - [v1.17.3](https://github.com/klauspost/compress/releases/tag/v1.17.3)
* fse: Fix max header size https://github.com/klauspost/compress/pull/881
* zstd: Improve better/best compression https://github.com/klauspost/compress/pull/877
* gzhttp: Fix missing content type on Close https://github.com/klauspost/compress/pull/883
* Oct 22nd, 2023 - [v1.17.2](https://github.com/klauspost/compress/releases/tag/v1.17.2)
* zstd: Fix rare *CORRUPTION* output in "best" mode. See https://github.com/klauspost/compress/pull/876
@ -554,7 +578,7 @@ For direct deflate use, NewStatelessWriter and StatelessDeflate are available. S
A `bufio.Writer` can of course be used to control write sizes. For example, to use a 4KB buffer:
```
```go
// replace 'ioutil.Discard' with your output.
gzw, err := gzip.NewWriterLevel(ioutil.Discard, gzip.StatelessCompression)
if err != nil {

View File

@ -1,4 +1,4 @@
module github.com/klauspost/compress
go 1.16
go 1.19

View File

@ -95,42 +95,54 @@ type Header struct {
// If there isn't enough input, io.ErrUnexpectedEOF is returned.
// The FirstBlock.OK will indicate if enough information was available to decode the first block header.
func (h *Header) Decode(in []byte) error {
_, err := h.DecodeAndStrip(in)
return err
}
// DecodeAndStrip will decode the header from the beginning of the stream
// and on success return the remaining bytes.
// This will decode the frame header and the first block header if enough bytes are provided.
// It is recommended to provide at least HeaderMaxSize bytes.
// If the frame header cannot be read an error will be returned.
// If there isn't enough input, io.ErrUnexpectedEOF is returned.
// The FirstBlock.OK will indicate if enough information was available to decode the first block header.
func (h *Header) DecodeAndStrip(in []byte) (remain []byte, err error) {
*h = Header{}
if len(in) < 4 {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
h.HeaderSize += 4
b, in := in[:4], in[4:]
if string(b) != frameMagic {
if string(b[1:4]) != skippableFrameMagic || b[0]&0xf0 != 0x50 {
return ErrMagicMismatch
return nil, ErrMagicMismatch
}
if len(in) < 4 {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
h.HeaderSize += 4
h.Skippable = true
h.SkippableID = int(b[0] & 0xf)
h.SkippableSize = binary.LittleEndian.Uint32(in)
return nil
return in[4:], nil
}
// Read Window_Descriptor
// https://github.com/facebook/zstd/blob/dev/doc/zstd_compression_format.md#window_descriptor
if len(in) < 1 {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
fhd, in := in[0], in[1:]
h.HeaderSize++
h.SingleSegment = fhd&(1<<5) != 0
h.HasCheckSum = fhd&(1<<2) != 0
if fhd&(1<<3) != 0 {
return errors.New("reserved bit set on frame header")
return nil, errors.New("reserved bit set on frame header")
}
if !h.SingleSegment {
if len(in) < 1 {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
var wd byte
wd, in = in[0], in[1:]
@ -148,7 +160,7 @@ func (h *Header) Decode(in []byte) error {
size = 4
}
if len(in) < int(size) {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
b, in = in[:size], in[size:]
h.HeaderSize += int(size)
@ -178,7 +190,7 @@ func (h *Header) Decode(in []byte) error {
if fcsSize > 0 {
h.HasFCS = true
if len(in) < fcsSize {
return io.ErrUnexpectedEOF
return nil, io.ErrUnexpectedEOF
}
b, in = in[:fcsSize], in[fcsSize:]
h.HeaderSize += int(fcsSize)
@ -199,7 +211,7 @@ func (h *Header) Decode(in []byte) error {
// Frame Header done, we will not fail from now on.
if len(in) < 3 {
return nil
return in, nil
}
tmp := in[:3]
bh := uint32(tmp[0]) | (uint32(tmp[1]) << 8) | (uint32(tmp[2]) << 16)
@ -209,7 +221,7 @@ func (h *Header) Decode(in []byte) error {
cSize := int(bh >> 3)
switch blockType {
case blockTypeReserved:
return nil
return in, nil
case blockTypeRLE:
h.FirstBlock.Compressed = true
h.FirstBlock.DecompressedSize = cSize
@ -225,5 +237,25 @@ func (h *Header) Decode(in []byte) error {
}
h.FirstBlock.OK = true
return nil
return in, nil
}
// AppendTo will append the encoded header to the dst slice.
// There is no error checking performed on the header values.
func (h *Header) AppendTo(dst []byte) ([]byte, error) {
if h.Skippable {
magic := [4]byte{0x50, 0x2a, 0x4d, 0x18}
magic[0] |= byte(h.SkippableID & 0xf)
dst = append(dst, magic[:]...)
f := h.SkippableSize
return append(dst, uint8(f), uint8(f>>8), uint8(f>>16), uint8(f>>24)), nil
}
f := frameHeader{
ContentSize: h.FrameContentSize,
WindowSize: uint32(h.WindowSize),
SingleSegment: h.SingleSegment,
Checksum: h.HasCheckSum,
DictID: h.DictionaryID,
}
return f.appendTo(dst), nil
}
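
DecodeAndStrip returns the bytes remaining after the frame header, and AppendTo re-serialises a header without validating its fields. A hedged sketch that inspects the header of a freshly encoded frame:

```go
package main

import (
	"fmt"

	"github.com/klauspost/compress/zstd"
)

func main() {
	enc, _ := zstd.NewWriter(nil) // nil writer is fine when only EncodeAll is used
	frame := enc.EncodeAll([]byte("hello zstd header"), nil)
	_ = enc.Close()

	var h zstd.Header
	remain, err := h.DecodeAndStrip(frame) // parse the frame header, return the rest
	if err != nil {
		panic(err)
	}
	fmt.Println("window:", h.WindowSize, "checksum:", h.HasCheckSum, "payload bytes:", len(remain))

	// AppendTo re-encodes the parsed header; no validation is performed on the fields.
	rebuilt, _ := h.AppendTo(nil)
	fmt.Println("rebuilt header bytes:", len(rebuilt))
}
```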

View File

@ -201,14 +201,6 @@ encodeLoop:
if delta >= e.maxMatchOff || delta <= 0 || load3232(src, offset) != first {
return
}
if debugAsserts {
if offset >= s {
panic(fmt.Sprintf("offset: %d - s:%d - rep: %d - cur :%d - max: %d", offset, s, rep, e.cur, e.maxMatchOff))
}
if !bytes.Equal(src[s:s+4], src[offset:offset+4]) {
panic(fmt.Sprintf("first match mismatch: %v != %v, first: %08x", src[s:s+4], src[offset:offset+4], first))
}
}
// Try to quick reject if we already have a long match.
if m.length > 16 {
left := len(src) - int(m.s+m.length)
@ -227,8 +219,10 @@ encodeLoop:
}
}
l := 4 + e.matchlen(s+4, offset+4, src)
if true {
if m.rep <= 0 {
// Extend candidate match backwards as far as possible.
// Do not extend repeats as we can assume they are optimal
// and offsets change if s == nextEmit.
tMin := s - e.maxMatchOff
if tMin < 0 {
tMin = 0
@ -239,7 +233,14 @@ encodeLoop:
l++
}
}
if debugAsserts {
if offset >= s {
panic(fmt.Sprintf("offset: %d - s:%d - rep: %d - cur :%d - max: %d", offset, s, rep, e.cur, e.maxMatchOff))
}
if !bytes.Equal(src[s:s+l], src[offset:offset+l]) {
panic(fmt.Sprintf("second match mismatch: %v != %v, first: %08x", src[s:s+4], src[offset:offset+4], first))
}
}
cand := match{offset: offset, s: s, length: l, rep: rep}
cand.estBits(bitsPerByte)
if m.est >= highScore || cand.est-m.est+(cand.s-m.s)*bitsPerByte>>10 < 0 {
@ -336,24 +337,31 @@ encodeLoop:
}
if debugAsserts {
if best.offset >= best.s {
panic(fmt.Sprintf("best.offset > s: %d >= %d", best.offset, best.s))
}
if best.s < nextEmit {
panic(fmt.Sprintf("s %d < nextEmit %d", best.s, nextEmit))
}
if best.offset < s-e.maxMatchOff {
panic(fmt.Sprintf("best.offset < s-e.maxMatchOff: %d < %d", best.offset, s-e.maxMatchOff))
}
if !bytes.Equal(src[best.s:best.s+best.length], src[best.offset:best.offset+best.length]) {
panic(fmt.Sprintf("match mismatch: %v != %v", src[best.s:best.s+best.length], src[best.offset:best.offset+best.length]))
}
}
// We have a match, we can store the forward value
s = best.s
if best.rep > 0 {
var seq seq
seq.matchLen = uint32(best.length - zstdMinMatch)
if debugAsserts && s < nextEmit {
panic("s < nextEmit")
}
addLiterals(&seq, best.s)
// Repeat. If bit 4 is set, this is a non-lit repeat.
seq.offset = uint32(best.rep & 3)
if debugSequences {
println("repeat sequence", seq, "next s:", s)
println("repeat sequence", seq, "next s:", best.s, "off:", best.s-best.offset)
}
blk.sequences = append(blk.sequences, seq)
@ -396,7 +404,6 @@ encodeLoop:
// A 4-byte match has been found. Update recent offsets.
// We'll later see if more than 4 bytes.
s = best.s
t := best.offset
offset1, offset2, offset3 = s-t, offset1, offset2

View File

@ -94,7 +94,7 @@ func WithEncoderConcurrency(n int) EOption {
// The value must be a power of two between MinWindowSize and MaxWindowSize.
// A larger value will enable better compression but allocate more memory and,
// for above-default values, take considerably longer.
// The default value is determined by the compression level.
// The default value is determined by the compression level and max 8MB.
func WithWindowSize(n int) EOption {
return func(o *encoderOptions) error {
switch {
@ -232,9 +232,9 @@ func WithEncoderLevel(l EncoderLevel) EOption {
case SpeedDefault:
o.windowSize = 8 << 20
case SpeedBetterCompression:
o.windowSize = 16 << 20
o.windowSize = 8 << 20
case SpeedBestCompression:
o.windowSize = 32 << 20
o.windowSize = 8 << 20
}
}
if !o.customALEntropy {
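
The hunk above caps the default window for the better/best levels at 8MB; callers who want the previous 16MB/32MB windows can still request them explicitly via WithWindowSize (the value must be a power of two). A rough sketch:

```go
package main

import (
	"bytes"

	"github.com/klauspost/compress/zstd"
)

func main() {
	var buf bytes.Buffer

	// Rely on the new default: SpeedBestCompression now uses an 8MB window.
	enc, err := zstd.NewWriter(&buf, zstd.WithEncoderLevel(zstd.SpeedBestCompression))
	if err != nil {
		panic(err)
	}
	_ = enc.Close()

	// Opt back into the old 32MB window explicitly.
	buf.Reset()
	enc, err = zstd.NewWriter(&buf,
		zstd.WithEncoderLevel(zstd.SpeedBestCompression),
		zstd.WithWindowSize(32<<20),
	)
	if err != nil {
		panic(err)
	}
	_ = enc.Close()
}
```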

View File

@ -76,7 +76,7 @@ func (f frameHeader) appendTo(dst []byte) []byte {
if f.SingleSegment {
dst = append(dst, uint8(f.ContentSize))
}
// Unless SingleSegment is set, framessizes < 256 are nto stored.
// Unless SingleSegment is set, framessizes < 256 are not stored.
case 1:
f.ContentSize -= 256
dst = append(dst, uint8(f.ContentSize), uint8(f.ContentSize>>8))

View File

@ -20,10 +20,9 @@ func (s *fseDecoder) buildDtable() error {
if v == -1 {
s.dt[highThreshold].setAddBits(uint8(i))
highThreshold--
symbolNext[i] = 1
} else {
symbolNext[i] = uint16(v)
v = 1
}
symbolNext[i] = uint16(v)
}
}
@ -35,10 +34,12 @@ func (s *fseDecoder) buildDtable() error {
for ss, v := range s.norm[:s.symbolLen] {
for i := 0; i < int(v); i++ {
s.dt[position].setAddBits(uint8(ss))
position = (position + step) & tableMask
for position > highThreshold {
for {
// lowprob area
position = (position + step) & tableMask
if position <= highThreshold {
break
}
}
}
}

View File

@ -157,8 +157,7 @@ sequenceDecs_decode_amd64_ll_update_zero:
// Update Literal Length State
MOVBQZX DI, R14
SHRQ $0x10, DI
MOVWQZX DI, DI
SHRL $0x10, DI
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -177,8 +176,7 @@ sequenceDecs_decode_amd64_ll_update_zero:
// Update Match Length State
MOVBQZX R8, R14
SHRQ $0x10, R8
MOVWQZX R8, R8
SHRL $0x10, R8
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -197,8 +195,7 @@ sequenceDecs_decode_amd64_ll_update_zero:
// Update Offset State
MOVBQZX R9, R14
SHRQ $0x10, R9
MOVWQZX R9, R9
SHRL $0x10, R9
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -459,8 +456,7 @@ sequenceDecs_decode_56_amd64_ll_update_zero:
// Update Literal Length State
MOVBQZX DI, R14
SHRQ $0x10, DI
MOVWQZX DI, DI
SHRL $0x10, DI
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -479,8 +475,7 @@ sequenceDecs_decode_56_amd64_ll_update_zero:
// Update Match Length State
MOVBQZX R8, R14
SHRQ $0x10, R8
MOVWQZX R8, R8
SHRL $0x10, R8
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -499,8 +494,7 @@ sequenceDecs_decode_56_amd64_ll_update_zero:
// Update Offset State
MOVBQZX R9, R14
SHRQ $0x10, R9
MOVWQZX R9, R9
SHRL $0x10, R9
LEAQ (BX)(R14*1), CX
MOVQ DX, R15
MOVQ CX, BX
@ -772,11 +766,10 @@ sequenceDecs_decode_bmi2_fill_2_end:
BZHIQ R14, R15, R15
// Update Offset State
BZHIQ R8, R15, CX
SHRXQ R8, R15, R15
MOVQ $0x00001010, R14
BEXTRQ R14, R8, R8
ADDQ CX, R8
BZHIQ R8, R15, CX
SHRXQ R8, R15, R15
SHRL $0x10, R8
ADDQ CX, R8
// Load ctx.ofTable
MOVQ ctx+16(FP), CX
@ -784,11 +777,10 @@ sequenceDecs_decode_bmi2_fill_2_end:
MOVQ (CX)(R8*8), R8
// Update Match Length State
BZHIQ DI, R15, CX
SHRXQ DI, R15, R15
MOVQ $0x00001010, R14
BEXTRQ R14, DI, DI
ADDQ CX, DI
BZHIQ DI, R15, CX
SHRXQ DI, R15, R15
SHRL $0x10, DI
ADDQ CX, DI
// Load ctx.mlTable
MOVQ ctx+16(FP), CX
@ -796,10 +788,9 @@ sequenceDecs_decode_bmi2_fill_2_end:
MOVQ (CX)(DI*8), DI
// Update Literal Length State
BZHIQ SI, R15, CX
MOVQ $0x00001010, R14
BEXTRQ R14, SI, SI
ADDQ CX, SI
BZHIQ SI, R15, CX
SHRL $0x10, SI
ADDQ CX, SI
// Load ctx.llTable
MOVQ ctx+16(FP), CX
@ -1032,11 +1023,10 @@ sequenceDecs_decode_56_bmi2_fill_end:
BZHIQ R14, R15, R15
// Update Offset State
BZHIQ R8, R15, CX
SHRXQ R8, R15, R15
MOVQ $0x00001010, R14
BEXTRQ R14, R8, R8
ADDQ CX, R8
BZHIQ R8, R15, CX
SHRXQ R8, R15, R15
SHRL $0x10, R8
ADDQ CX, R8
// Load ctx.ofTable
MOVQ ctx+16(FP), CX
@ -1044,11 +1034,10 @@ sequenceDecs_decode_56_bmi2_fill_end:
MOVQ (CX)(R8*8), R8
// Update Match Length State
BZHIQ DI, R15, CX
SHRXQ DI, R15, R15
MOVQ $0x00001010, R14
BEXTRQ R14, DI, DI
ADDQ CX, DI
BZHIQ DI, R15, CX
SHRXQ DI, R15, R15
SHRL $0x10, DI
ADDQ CX, DI
// Load ctx.mlTable
MOVQ ctx+16(FP), CX
@ -1056,10 +1045,9 @@ sequenceDecs_decode_56_bmi2_fill_end:
MOVQ (CX)(DI*8), DI
// Update Literal Length State
BZHIQ SI, R15, CX
MOVQ $0x00001010, R14
BEXTRQ R14, SI, SI
ADDQ CX, SI
BZHIQ SI, R15, CX
SHRL $0x10, SI
ADDQ CX, SI
// Load ctx.llTable
MOVQ ctx+16(FP), CX
@ -1967,8 +1955,7 @@ sequenceDecs_decodeSync_amd64_ll_update_zero:
// Update Literal Length State
MOVBQZX DI, R13
SHRQ $0x10, DI
MOVWQZX DI, DI
SHRL $0x10, DI
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -1987,8 +1974,7 @@ sequenceDecs_decodeSync_amd64_ll_update_zero:
// Update Match Length State
MOVBQZX R8, R13
SHRQ $0x10, R8
MOVWQZX R8, R8
SHRL $0x10, R8
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -2007,8 +1993,7 @@ sequenceDecs_decodeSync_amd64_ll_update_zero:
// Update Offset State
MOVBQZX R9, R13
SHRQ $0x10, R9
MOVWQZX R9, R9
SHRL $0x10, R9
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -2514,11 +2499,10 @@ sequenceDecs_decodeSync_bmi2_fill_2_end:
BZHIQ R13, R14, R14
// Update Offset State
BZHIQ R8, R14, CX
SHRXQ R8, R14, R14
MOVQ $0x00001010, R13
BEXTRQ R13, R8, R8
ADDQ CX, R8
BZHIQ R8, R14, CX
SHRXQ R8, R14, R14
SHRL $0x10, R8
ADDQ CX, R8
// Load ctx.ofTable
MOVQ ctx+16(FP), CX
@ -2526,11 +2510,10 @@ sequenceDecs_decodeSync_bmi2_fill_2_end:
MOVQ (CX)(R8*8), R8
// Update Match Length State
BZHIQ DI, R14, CX
SHRXQ DI, R14, R14
MOVQ $0x00001010, R13
BEXTRQ R13, DI, DI
ADDQ CX, DI
BZHIQ DI, R14, CX
SHRXQ DI, R14, R14
SHRL $0x10, DI
ADDQ CX, DI
// Load ctx.mlTable
MOVQ ctx+16(FP), CX
@ -2538,10 +2521,9 @@ sequenceDecs_decodeSync_bmi2_fill_2_end:
MOVQ (CX)(DI*8), DI
// Update Literal Length State
BZHIQ SI, R14, CX
MOVQ $0x00001010, R13
BEXTRQ R13, SI, SI
ADDQ CX, SI
BZHIQ SI, R14, CX
SHRL $0x10, SI
ADDQ CX, SI
// Load ctx.llTable
MOVQ ctx+16(FP), CX
@ -3055,8 +3037,7 @@ sequenceDecs_decodeSync_safe_amd64_ll_update_zero:
// Update Literal Length State
MOVBQZX DI, R13
SHRQ $0x10, DI
MOVWQZX DI, DI
SHRL $0x10, DI
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -3075,8 +3056,7 @@ sequenceDecs_decodeSync_safe_amd64_ll_update_zero:
// Update Match Length State
MOVBQZX R8, R13
SHRQ $0x10, R8
MOVWQZX R8, R8
SHRL $0x10, R8
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -3095,8 +3075,7 @@ sequenceDecs_decodeSync_safe_amd64_ll_update_zero:
// Update Offset State
MOVBQZX R9, R13
SHRQ $0x10, R9
MOVWQZX R9, R9
SHRL $0x10, R9
LEAQ (BX)(R13*1), CX
MOVQ DX, R14
MOVQ CX, BX
@ -3704,11 +3683,10 @@ sequenceDecs_decodeSync_safe_bmi2_fill_2_end:
BZHIQ R13, R14, R14
// Update Offset State
BZHIQ R8, R14, CX
SHRXQ R8, R14, R14
MOVQ $0x00001010, R13
BEXTRQ R13, R8, R8
ADDQ CX, R8
BZHIQ R8, R14, CX
SHRXQ R8, R14, R14
SHRL $0x10, R8
ADDQ CX, R8
// Load ctx.ofTable
MOVQ ctx+16(FP), CX
@ -3716,11 +3694,10 @@ sequenceDecs_decodeSync_safe_bmi2_fill_2_end:
MOVQ (CX)(R8*8), R8
// Update Match Length State
BZHIQ DI, R14, CX
SHRXQ DI, R14, R14
MOVQ $0x00001010, R13
BEXTRQ R13, DI, DI
ADDQ CX, DI
BZHIQ DI, R14, CX
SHRXQ DI, R14, R14
SHRL $0x10, DI
ADDQ CX, DI
// Load ctx.mlTable
MOVQ ctx+16(FP), CX
@ -3728,10 +3705,9 @@ sequenceDecs_decodeSync_safe_bmi2_fill_2_end:
MOVQ (CX)(DI*8), DI
// Update Literal Length State
BZHIQ SI, R14, CX
MOVQ $0x00001010, R13
BEXTRQ R13, SI, SI
ADDQ CX, SI
BZHIQ SI, R14, CX
SHRL $0x10, SI
ADDQ CX, SI
// Load ctx.llTable
MOVQ ctx+16(FP), CX

View File

@ -28,6 +28,8 @@ var (
uint32Type = reflect.TypeOf(uint32(1))
uint64Type = reflect.TypeOf(uint64(1))
uintptrType = reflect.TypeOf(uintptr(1))
float32Type = reflect.TypeOf(float32(1))
float64Type = reflect.TypeOf(float64(1))
@ -308,11 +310,11 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
case reflect.Struct:
{
// All structs enter here. We're not interested in most types.
if !canConvert(obj1Value, timeType) {
if !obj1Value.CanConvert(timeType) {
break
}
// time.Time can compared!
// time.Time can be compared!
timeObj1, ok := obj1.(time.Time)
if !ok {
timeObj1 = obj1Value.Convert(timeType).Interface().(time.Time)
@ -328,7 +330,7 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
case reflect.Slice:
{
// We only care about the []byte type.
if !canConvert(obj1Value, bytesType) {
if !obj1Value.CanConvert(bytesType) {
break
}
@ -345,6 +347,26 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
return CompareType(bytes.Compare(bytesObj1, bytesObj2)), true
}
case reflect.Uintptr:
{
uintptrObj1, ok := obj1.(uintptr)
if !ok {
uintptrObj1 = obj1Value.Convert(uintptrType).Interface().(uintptr)
}
uintptrObj2, ok := obj2.(uintptr)
if !ok {
uintptrObj2 = obj2Value.Convert(uintptrType).Interface().(uintptr)
}
if uintptrObj1 > uintptrObj2 {
return compareGreater, true
}
if uintptrObj1 == uintptrObj2 {
return compareEqual, true
}
if uintptrObj1 < uintptrObj2 {
return compareLess, true
}
}
}
return compareEqual, false
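
The new reflect.Uintptr case above feeds testify's ordering helpers, so comparisons such as Greater and LessOrEqual now accept uintptr operands directly. A hypothetical test sketch:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestUintptrOrdering(t *testing.T) {
	// Both operands must share the same type; the compare helper above now
	// handles the uintptr kind explicitly.
	assert.Greater(t, uintptr(2), uintptr(1))
	assert.LessOrEqual(t, uintptr(1), uintptr(1))
}
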

View File

@ -1,16 +0,0 @@
//go:build go1.17
// +build go1.17
// TODO: once support for Go 1.16 is dropped, this file can be
// merged/removed with assertion_compare_go1.17_test.go and
// assertion_compare_legacy.go
package assert
import "reflect"
// Wrapper around reflect.Value.CanConvert, for compatibility
// reasons.
func canConvert(value reflect.Value, to reflect.Type) bool {
return value.CanConvert(to)
}

View File

@ -1,16 +0,0 @@
//go:build !go1.17
// +build !go1.17
// TODO: once support for Go 1.16 is dropped, this file can be
// merged/removed with assertion_compare_go1.17_test.go and
// assertion_compare_can_convert.go
package assert
import "reflect"
// Older versions of Go do not have the reflect.Value.CanConvert
// method.
func canConvert(value reflect.Value, to reflect.Type) bool {
return false
}

View File

@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package assert
@ -107,7 +104,7 @@ func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{},
return EqualExportedValues(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted")
@ -616,6 +613,16 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf
return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// assert.NotImplementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func NotImplementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return NotImplements(t, interfaceObject, object, append([]interface{}{msg}, args...)...)
}
// NotNilf asserts that the specified object is not nil.
//
// assert.NotNilf(t, err, "error message %s", "formatted")
@ -660,10 +667,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string,
return NotSame(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted")
// assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -747,10 +756,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg
return Same(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted")
// assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()

View File

@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package assert
@ -189,7 +186,7 @@ func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface
return EqualExportedValuesf(a.t, expected, actual, msg, args...)
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValues(uint32(123), int32(123))
@ -200,7 +197,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn
return EqualValues(a.t, expected, actual, msgAndArgs...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted")
@ -1221,6 +1218,26 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in
return NotErrorIsf(a.t, err, target, msg, args...)
}
// NotImplements asserts that an object does not implement the specified interface.
//
// a.NotImplements((*MyInterface)(nil), new(MyObject))
func (a *Assertions) NotImplements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotImplements(a.t, interfaceObject, object, msgAndArgs...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// a.NotImplementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func (a *Assertions) NotImplementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotImplementsf(a.t, interfaceObject, object, msg, args...)
}
// NotNil asserts that the specified object is not nil.
//
// a.NotNil(err)
@ -1309,10 +1326,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri
return NotSamef(a.t, expected, actual, msg, args...)
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// a.NotSubset([1, 3, 4], [1, 2])
// a.NotSubset({"x": 1, "y": 2}, {"z": 3})
func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1320,10 +1339,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs
return NotSubset(a.t, list, subset, msgAndArgs...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted")
// a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1483,10 +1504,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string,
return Samef(a.t, expected, actual, msg, args...)
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// a.Subset([1, 2, 3], [1, 2])
// a.Subset({"x": 1, "y": 2}, {"x": 1})
func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1494,10 +1516,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...
return Subset(a.t, list, subset, msgAndArgs...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted")
// a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()

View File

@ -19,7 +19,7 @@ import (
"github.com/davecgh/go-spew/spew"
"github.com/pmezard/go-difflib/difflib"
yaml "gopkg.in/yaml.v3"
"gopkg.in/yaml.v3"
)
//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_format.go.tmpl"
@ -110,7 +110,12 @@ func copyExportedFields(expected interface{}) interface{} {
return result.Interface()
case reflect.Array, reflect.Slice:
result := reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len())
var result reflect.Value
if expectedKind == reflect.Array {
result = reflect.New(reflect.ArrayOf(expectedValue.Len(), expectedType.Elem())).Elem()
} else {
result = reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len())
}
for i := 0; i < expectedValue.Len(); i++ {
index := expectedValue.Index(i)
if isNil(index) {
@ -140,6 +145,8 @@ func copyExportedFields(expected interface{}) interface{} {
// structures.
//
// This function does no assertion of any kind.
//
// Deprecated: Use [EqualExportedValues] instead.
func ObjectsExportedFieldsAreEqual(expected, actual interface{}) bool {
expectedCleaned := copyExportedFields(expected)
actualCleaned := copyExportedFields(actual)
@ -153,17 +160,40 @@ func ObjectsAreEqualValues(expected, actual interface{}) bool {
return true
}
actualType := reflect.TypeOf(actual)
if actualType == nil {
expectedValue := reflect.ValueOf(expected)
actualValue := reflect.ValueOf(actual)
if !expectedValue.IsValid() || !actualValue.IsValid() {
return false
}
expectedValue := reflect.ValueOf(expected)
if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
// Attempt comparison after type conversion
return reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual)
expectedType := expectedValue.Type()
actualType := actualValue.Type()
if !expectedType.ConvertibleTo(actualType) {
return false
}
return false
if !isNumericType(expectedType) || !isNumericType(actualType) {
// Attempt comparison after type conversion
return reflect.DeepEqual(
expectedValue.Convert(actualType).Interface(), actual,
)
}
// If BOTH values are numeric, there are chances of false positives due
// to overflow or underflow. So, we need to make sure to always convert
// the smaller type to a larger type before comparing.
if expectedType.Size() >= actualType.Size() {
return actualValue.Convert(expectedType).Interface() == expected
}
return expectedValue.Convert(actualType).Interface() == actual
}
// isNumericType returns true if the type is one of:
// int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64,
// float32, float64, complex64, complex128
func isNumericType(t reflect.Type) bool {
return t.Kind() >= reflect.Int && t.Kind() <= reflect.Complex128
}
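
The size check matters because converting the larger value down to the smaller type can truncate it and produce a false match. A hypothetical illustration of the case it guards against:

package main

import (
	"fmt"

	"github.com/stretchr/testify/assert"
)

func main() {
	// int64(256) truncated to int8 is 0, so converting expected to the actual
	// type used to report these as equal; comparing in the larger type does not.
	fmt.Println(assert.ObjectsAreEqualValues(int64(256), int8(0))) // false
}
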
/* CallerInfo is necessary because the assert functions use the testing object
@ -266,7 +296,7 @@ func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
// Aligns the provided message so that all lines after the first line start at the same location as the first line.
// Assumes that the first line starts at the correct location (after carriage return, tab, label, spacer and tab).
// The longestLabelLen parameter specifies the length of the longest label in the output (required becaues this is the
// The longestLabelLen parameter specifies the length of the longest label in the output (required because this is the
// basis on which the alignment occurs).
func indentMessageLines(message string, longestLabelLen int) string {
outBuf := new(bytes.Buffer)
@ -382,6 +412,25 @@ func Implements(t TestingT, interfaceObject interface{}, object interface{}, msg
return true
}
// NotImplements asserts that an object does not implement the specified interface.
//
// assert.NotImplements(t, (*MyInterface)(nil), new(MyObject))
func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
interfaceType := reflect.TypeOf(interfaceObject).Elem()
if object == nil {
return Fail(t, fmt.Sprintf("Cannot check if nil does not implement %v", interfaceType), msgAndArgs...)
}
if reflect.TypeOf(object).Implements(interfaceType) {
return Fail(t, fmt.Sprintf("%T implements %v", object, interfaceType), msgAndArgs...)
}
return true
}
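
A hypothetical usage sketch for the new NotImplements assertion (the MyInterface/MyObject names in the doc comment stand in for any interface and value; `plain` here is made up):

package demo

import (
	"io"
	"testing"

	"github.com/stretchr/testify/assert"
)

type plain struct{}

func TestNotImplements(t *testing.T) {
	// plain has no Read method, so it does not implement io.Reader.
	assert.NotImplements(t, (*io.Reader)(nil), plain{})
}
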
// IsType asserts that the specified objects are of the same type.
func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
@ -496,7 +545,7 @@ func samePointers(first, second interface{}) bool {
// representations appropriate to be presented to the user.
//
// If the values are not of like type, the returned strings will be prefixed
// with the type name, and the value will be enclosed in parenthesis similar
// with the type name, and the value will be enclosed in parentheses similar
// to a type conversion in the Go grammar.
func formatUnequalValues(expected, actual interface{}) (e string, a string) {
if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
@ -523,7 +572,7 @@ func truncatingFormat(data interface{}) string {
return value
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValues(t, uint32(123), int32(123))
@ -566,12 +615,19 @@ func EqualExportedValues(t TestingT, expected, actual interface{}, msgAndArgs ..
return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...)
}
if aType.Kind() == reflect.Ptr {
aType = aType.Elem()
}
if bType.Kind() == reflect.Ptr {
bType = bType.Elem()
}
if aType.Kind() != reflect.Struct {
return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...)
return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...)
}
if bType.Kind() != reflect.Struct {
return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...)
return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...)
}
expected = copyExportedFields(expected)
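
With the pointer dereference added above, EqualExportedValues accepts pointers to structs as well as struct values; unexported fields are still ignored. A hypothetical sketch (`payload` is an invented type):

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

type payload struct {
	ID   int
	note string // unexported: ignored by the comparison
}

func TestEqualExportedValues(t *testing.T) {
	assert.EqualExportedValues(t, &payload{ID: 1, note: "a"}, &payload{ID: 1, note: "b"})
}
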
@ -620,17 +676,6 @@ func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return Fail(t, "Expected value not to be nil.", msgAndArgs...)
}
// containsKind checks if a specified kind in the slice of kinds.
func containsKind(kinds []reflect.Kind, kind reflect.Kind) bool {
for i := 0; i < len(kinds); i++ {
if kind == kinds[i] {
return true
}
}
return false
}
// isNil checks if a specified object is nil or not, without Failing.
func isNil(object interface{}) bool {
if object == nil {
@ -638,16 +683,13 @@ func isNil(object interface{}) bool {
}
value := reflect.ValueOf(object)
kind := value.Kind()
isNilableKind := containsKind(
[]reflect.Kind{
reflect.Chan, reflect.Func,
reflect.Interface, reflect.Map,
reflect.Ptr, reflect.Slice, reflect.UnsafePointer},
kind)
switch value.Kind() {
case
reflect.Chan, reflect.Func,
reflect.Interface, reflect.Map,
reflect.Ptr, reflect.Slice, reflect.UnsafePointer:
if isNilableKind && value.IsNil() {
return true
return value.IsNil()
}
return false
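
The switch keeps the behavior the removed containsKind helper provided: only nilable kinds are asked IsNil. The case this helper exists for is a typed nil stored in an interface, which is not == nil. A minimal illustration:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	var p *int
	var v interface{} = p
	fmt.Println(v == nil)                   // false: the interface holds a (*int)(nil)
	fmt.Println(reflect.ValueOf(v).IsNil()) // true: the pointer itself is nil
}
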
@ -731,16 +773,14 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
}
// getLen try to get length of object.
// return (false, 0) if impossible.
func getLen(x interface{}) (ok bool, length int) {
// getLen tries to get the length of an object.
// It returns (0, false) if impossible.
func getLen(x interface{}) (length int, ok bool) {
v := reflect.ValueOf(x)
defer func() {
if e := recover(); e != nil {
ok = false
}
ok = recover() == nil
}()
return true, v.Len()
return v.Len(), true
}
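
The rewritten getLen relies on a named return set in a deferred closure, so a panic from reflect.Value.Len on a type without a length comes back as ok == false. A standalone sketch of the idiom (`tryLen` is a hypothetical name):

package main

import (
	"fmt"
	"reflect"
)

func tryLen(x interface{}) (length int, ok bool) {
	v := reflect.ValueOf(x)
	defer func() {
		// If v.Len() panicked, recover() is non-nil and the zero length is kept.
		ok = recover() == nil
	}()
	return v.Len(), true
}

func main() {
	fmt.Println(tryLen([]int{1, 2, 3})) // 3 true
	fmt.Println(tryLen(42))             // 0 false
}
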
// Len asserts that the specified object has specific length.
@ -751,13 +791,13 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{})
if h, ok := t.(tHelper); ok {
h.Helper()
}
ok, l := getLen(object)
l, ok := getLen(object)
if !ok {
return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", object), msgAndArgs...)
return Fail(t, fmt.Sprintf("\"%v\" could not be applied builtin len()", object), msgAndArgs...)
}
if l != length {
return Fail(t, fmt.Sprintf("\"%s\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
return Fail(t, fmt.Sprintf("\"%v\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
}
return true
}
@ -919,10 +959,11 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{})
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// assert.Subset(t, [1, 2, 3], [1, 2])
// assert.Subset(t, {"x": 1, "y": 2}, {"x": 1})
func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -975,10 +1016,12 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok
return true
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// assert.NotSubset(t, [1, 3, 4], [1, 2])
// assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3})
func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1439,7 +1482,7 @@ func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAnd
h.Helper()
}
if math.IsNaN(epsilon) {
return Fail(t, "epsilon must not be NaN")
return Fail(t, "epsilon must not be NaN", msgAndArgs...)
}
actualEpsilon, err := calcRelativeError(expected, actual)
if err != nil {
@ -1458,19 +1501,26 @@ func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, m
if h, ok := t.(tHelper); ok {
h.Helper()
}
if expected == nil || actual == nil ||
reflect.TypeOf(actual).Kind() != reflect.Slice ||
reflect.TypeOf(expected).Kind() != reflect.Slice {
if expected == nil || actual == nil {
return Fail(t, "Parameters must be slice", msgAndArgs...)
}
actualSlice := reflect.ValueOf(actual)
expectedSlice := reflect.ValueOf(expected)
actualSlice := reflect.ValueOf(actual)
for i := 0; i < actualSlice.Len(); i++ {
result := InEpsilon(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), epsilon)
if !result {
return result
if expectedSlice.Type().Kind() != reflect.Slice {
return Fail(t, "Expected value must be slice", msgAndArgs...)
}
expectedLen := expectedSlice.Len()
if !IsType(t, expected, actual) || !Len(t, actual, expectedLen) {
return false
}
for i := 0; i < expectedLen; i++ {
if !InEpsilon(t, expectedSlice.Index(i).Interface(), actualSlice.Index(i).Interface(), epsilon, "at index %d", i) {
return false
}
}
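
The rewritten InEpsilonSlice now checks that both arguments are slices of the same type and length before comparing element-wise relative error. A hypothetical usage sketch:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestInEpsilonSlice(t *testing.T) {
	expected := []float64{100, 200}
	actual := []float64{101, 198}
	// Each element's relative error (0.01 and 0.01) is below epsilon.
	assert.InEpsilonSlice(t, expected, actual, 0.02)
}
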
@ -1870,23 +1920,18 @@ func (c *CollectT) Errorf(format string, args ...interface{}) {
}
// FailNow panics.
func (c *CollectT) FailNow() {
func (*CollectT) FailNow() {
panic("Assertion failed")
}
// Reset clears the collected errors.
func (c *CollectT) Reset() {
c.errors = nil
// Deprecated: That was a method for internal usage that should not have been published. Now just panics.
func (*CollectT) Reset() {
panic("Reset() is deprecated")
}
// Copy copies the collected errors to the supplied t.
func (c *CollectT) Copy(t TestingT) {
if tt, ok := t.(tHelper); ok {
tt.Helper()
}
for _, err := range c.errors {
t.Errorf("%v", err)
}
// Deprecated: That was a method for internal usage that should not have been published. Now just panics.
func (*CollectT) Copy(TestingT) {
panic("Copy() is deprecated")
}
// EventuallyWithT asserts that given condition will be met in waitFor time,
@ -1912,8 +1957,8 @@ func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time
h.Helper()
}
collect := new(CollectT)
ch := make(chan bool, 1)
var lastFinishedTickErrs []error
ch := make(chan []error, 1)
timer := time.NewTimer(waitFor)
defer timer.Stop()
@ -1924,19 +1969,25 @@ func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time
for tick := ticker.C; ; {
select {
case <-timer.C:
collect.Copy(t)
for _, err := range lastFinishedTickErrs {
t.Errorf("%v", err)
}
return Fail(t, "Condition never satisfied", msgAndArgs...)
case <-tick:
tick = nil
collect.Reset()
go func() {
collect := new(CollectT)
defer func() {
ch <- collect.errors
}()
condition(collect)
ch <- len(collect.errors) == 0
}()
case v := <-ch:
if v {
case errs := <-ch:
if len(errs) == 0 {
return true
}
// Keep the errors from the last ended condition, so that they can be copied to t if timeout is reached.
lastFinishedTickErrs = errs
tick = ticker.C
}
}
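
After this rework each tick runs the condition against a fresh CollectT in its own goroutine, and only the errors from the last completed tick are copied to t on timeout. A hypothetical usage sketch:

package demo

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestEventuallyWithT(t *testing.T) {
	var counter atomic.Int32
	go func() {
		time.Sleep(50 * time.Millisecond)
		counter.Store(3)
	}()

	// The condition is retried every 10ms until it passes or 1s elapses.
	assert.EventuallyWithT(t, func(c *assert.CollectT) {
		assert.GreaterOrEqual(c, counter.Load(), int32(3))
	}, time.Second, 10*time.Millisecond)
}
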

View File

@ -12,7 +12,7 @@ import (
// an error if building a new request fails.
func httpCode(handler http.HandlerFunc, method, url string, values url.Values) (int, error) {
w := httptest.NewRecorder()
req, err := http.NewRequest(method, url, nil)
req, err := http.NewRequest(method, url, http.NoBody)
if err != nil {
return -1, err
}
@ -32,12 +32,12 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, value
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isSuccessCode := code >= http.StatusOK && code <= http.StatusPartialContent
if !isSuccessCode {
Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isSuccessCode
@ -54,12 +54,12 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, valu
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect
if !isRedirectCode {
Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isRedirectCode
@ -76,12 +76,12 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isErrorCode := code >= http.StatusBadRequest
if !isErrorCode {
Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isErrorCode
@ -98,12 +98,12 @@ func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, va
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
successful := code == statuscode
if !successful {
Fail(t, fmt.Sprintf("Expected HTTP status code %d for %q but received %d", statuscode, url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP status code %d for %q but received %d", statuscode, url+"?"+values.Encode(), code), msgAndArgs...)
}
return successful
@ -113,7 +113,10 @@ func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, va
// empty string if building a new request fails.
func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string {
w := httptest.NewRecorder()
req, err := http.NewRequest(method, url+"?"+values.Encode(), nil)
if len(values) > 0 {
url += "?" + values.Encode()
}
req, err := http.NewRequest(method, url, http.NoBody)
if err != nil {
return ""
}
@ -135,7 +138,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string,
contains := strings.Contains(body, fmt.Sprint(str))
if !contains {
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...)
}
return contains
@ -155,7 +158,7 @@ func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url strin
contains := strings.Contains(body, fmt.Sprint(str))
if contains {
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...)
}
return !contains
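
The helpers above now forward msgAndArgs into the failure message and build requests with http.NoBody; HTTPBody also appends the query string only when values are present. A hypothetical usage sketch (`health` is an invented handler):

package demo

import (
	"fmt"
	"net/http"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
)

func health(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, "ok")
}

func TestHealth(t *testing.T) {
	assert.HTTPSuccess(t, health, http.MethodGet, "/health", nil, "health endpoint must answer")
	assert.HTTPBodyContains(t, health, http.MethodGet, "/health", url.Values{}, "ok")
}
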

View File

@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package require
@ -235,7 +232,7 @@ func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{},
t.FailNow()
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValues(t, uint32(123), int32(123))
@ -249,7 +246,7 @@ func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArg
t.FailNow()
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted")
@ -1546,6 +1543,32 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf
t.FailNow()
}
// NotImplements asserts that an object does not implement the specified interface.
//
// assert.NotImplements(t, (*MyInterface)(nil), new(MyObject))
func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if assert.NotImplements(t, interfaceObject, object, msgAndArgs...) {
return
}
t.FailNow()
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// assert.NotImplementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func NotImplementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if assert.NotImplementsf(t, interfaceObject, object, msg, args...) {
return
}
t.FailNow()
}
// NotNil asserts that the specified object is not nil.
//
// assert.NotNil(t, err)
@ -1658,10 +1681,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string,
t.FailNow()
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// assert.NotSubset(t, [1, 3, 4], [1, 2])
// assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3})
func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1672,10 +1697,12 @@ func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...i
t.FailNow()
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted")
// assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1880,10 +1907,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg
t.FailNow()
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// assert.Subset(t, [1, 2, 3], [1, 2])
// assert.Subset(t, {"x": 1, "y": 2}, {"x": 1})
func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1894,10 +1922,11 @@ func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...inte
t.FailNow()
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted")
// assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()

View File

@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package require
@ -190,7 +187,7 @@ func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface
EqualExportedValuesf(a.t, expected, actual, msg, args...)
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValues(uint32(123), int32(123))
@ -201,7 +198,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn
EqualValues(a.t, expected, actual, msgAndArgs...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted")
@ -1222,6 +1219,26 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in
NotErrorIsf(a.t, err, target, msg, args...)
}
// NotImplements asserts that an object does not implement the specified interface.
//
// a.NotImplements((*MyInterface)(nil), new(MyObject))
func (a *Assertions) NotImplements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
NotImplements(a.t, interfaceObject, object, msgAndArgs...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// a.NotImplementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func (a *Assertions) NotImplementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
NotImplementsf(a.t, interfaceObject, object, msg, args...)
}
// NotNil asserts that the specified object is not nil.
//
// a.NotNil(err)
@ -1310,10 +1327,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri
NotSamef(a.t, expected, actual, msg, args...)
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// a.NotSubset([1, 3, 4], [1, 2])
// a.NotSubset({"x": 1, "y": 2}, {"z": 3})
func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1321,10 +1340,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs
NotSubset(a.t, list, subset, msgAndArgs...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted")
// a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1484,10 +1505,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string,
Samef(a.t, expected, actual, msg, args...)
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// a.Subset([1, 2, 3], [1, 2])
// a.Subset({"x": 1, "y": 2}, {"x": 1})
func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1495,10 +1517,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...
Subset(a.t, list, subset, msgAndArgs...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted")
// a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()

View File

@ -162,12 +162,17 @@ func (b *Bucket) CreateBucket(key []byte) (*Bucket, error) {
return nil, ErrBucketNameRequired
}
// Insert into node.
// Tip: Use a new variable `newKey` instead of reusing the existing `key` to prevent
// it from being marked as leaking, which would keep it from being allocated on the stack.
newKey := cloneBytes(key)
// Move cursor to correct position.
c := b.Cursor()
k, _, flags := c.seek(key)
k, _, flags := c.seek(newKey)
// Return an error if there is an existing key.
if bytes.Equal(key, k) {
if bytes.Equal(newKey, k) {
if (flags & bucketLeafFlag) != 0 {
return nil, ErrBucketExists
}
@ -182,16 +187,14 @@ func (b *Bucket) CreateBucket(key []byte) (*Bucket, error) {
}
var value = bucket.write()
// Insert into node.
key = cloneBytes(key)
c.node().put(key, key, value, 0, bucketLeafFlag)
c.node().put(newKey, newKey, value, 0, bucketLeafFlag)
// Since subbuckets are not allowed on inline buckets, we need to
// dereference the inline page, if it exists. This will cause the bucket
// to be treated as a regular, non-inline bucket for the rest of the tx.
b.page = nil
return b.Bucket(key), nil
return b.Bucket(newKey), nil
}
// CreateBucketIfNotExists creates a new bucket if it doesn't already exist and returns a reference to it.
@ -288,18 +291,23 @@ func (b *Bucket) Put(key []byte, value []byte) error {
return ErrValueTooLarge
}
// Insert into node.
// Tip: Use a new variable `newKey` instead of reusing the existing `key` to prevent
// it from being marked as leaking, which would keep it from being allocated on the stack.
newKey := cloneBytes(key)
// Move cursor to correct position.
c := b.Cursor()
k, _, flags := c.seek(key)
k, _, flags := c.seek(newKey)
// Return an error if there is an existing key with a bucket value.
if bytes.Equal(key, k) && (flags&bucketLeafFlag) != 0 {
if bytes.Equal(newKey, k) && (flags&bucketLeafFlag) != 0 {
return ErrIncompatibleValue
}
// Insert into node.
key = cloneBytes(key)
c.node().put(key, key, value, 0, 0)
// gofail: var beforeBucketPut struct{}
c.node().put(newKey, newKey, value, 0, 0)
return nil
}
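
As the tip in these hunks notes, reassigning the parameter (key = cloneBytes(key)) makes the compiler treat the parameter itself as leaking, which forces callers' key buffers onto the heap; cloning into a separate variable keeps the parameter non-escaping. A minimal sketch of the pattern, with invented names (`retained` stands in for the node that keeps the key alive):

package demo

var retained [][]byte // stand-in for the B+tree node that keeps the key alive

func cloneBytes(v []byte) []byte {
	c := make([]byte, len(v))
	copy(c, v)
	return c
}

func put(key []byte) {
	// Only the clone is retained, so `key` itself does not escape and callers
	// can keep their key buffers on the stack (verify with `go build -gcflags=-m`).
	newKey := cloneBytes(key)
	retained = append(retained, newKey)
}
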

View File

@ -15,16 +15,15 @@ import (
// ArrayCodec is the Codec used for bsoncore.Array values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// ArrayCodec registered.
// Deprecated: ArrayCodec will not be directly accessible in Go Driver 2.0.
type ArrayCodec struct{}
var defaultArrayCodec = NewArrayCodec()
// NewArrayCodec returns an ArrayCodec.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// ArrayCodec registered.
// Deprecated: NewArrayCodec will not be available in Go Driver 2.0. See
// [ArrayCodec] for more details.
func NewArrayCodec() *ArrayCodec {
return &ArrayCodec{}
}

View File

@ -17,13 +17,28 @@ import (
// ByteSliceCodec is the Codec used for []byte values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// ByteSliceCodec registered.
// Deprecated: ByteSliceCodec will not be directly configurable in Go Driver
// 2.0. To configure the byte slice encode and decode behavior, use the
// configuration methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the byte slice
// encode and decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to encode nil byte slices as empty
// BSON binary values, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// NilByteSliceAsEmpty: true,
// })
//
// See the deprecation notice for each field in ByteSliceCodec for the
// corresponding settings.
type ByteSliceCodec struct {
// EncodeNilAsEmpty causes EncodeValue to marshal nil Go byte slices as empty BSON binary values
// instead of BSON null.
//
// Deprecated: Use bson.Encoder.NilByteSliceAsEmpty instead.
// Deprecated: Use bson.Encoder.NilByteSliceAsEmpty or options.BSONOptions.NilByteSliceAsEmpty
// instead.
EncodeNilAsEmpty bool
}
@ -38,8 +53,8 @@ var (
// NewByteSliceCodec returns a ByteSliceCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// ByteSliceCodec registered.
// Deprecated: NewByteSliceCodec will not be available in Go Driver 2.0. See
// [ByteSliceCodec] for more details.
func NewByteSliceCodec(opts ...*bsonoptions.ByteSliceCodecOptions) *ByteSliceCodec {
byteSliceOpt := bsonoptions.MergeByteSliceCodecOptions(opts...)
codec := ByteSliceCodec{}

View File

@ -41,7 +41,7 @@ func newDefaultStructCodec() *StructCodec {
if err != nil {
// This function is called from the codec registration path, so errors can't be propagated. If there's an error
// constructing the StructCodec, we panic to avoid losing it.
panic(fmt.Errorf("error creating default StructCodec: %v", err))
panic(fmt.Errorf("error creating default StructCodec: %w", err))
}
return codec
}
@ -178,7 +178,7 @@ func (dvd DefaultValueDecoders) DDecodeValue(dc DecodeContext, vr bsonrw.ValueRe
for {
key, elemVr, err := dr.ReadElement()
if err == bsonrw.ErrEOD {
if errors.Is(err, bsonrw.ErrEOD) {
break
} else if err != nil {
return err
@ -1379,7 +1379,7 @@ func (dvd DefaultValueDecoders) MapDecodeValue(dc DecodeContext, vr bsonrw.Value
keyType := val.Type().Key()
for {
key, vr, err := dr.ReadElement()
if err == bsonrw.ErrEOD {
if errors.Is(err, bsonrw.ErrEOD) {
break
}
if err != nil {
@ -1675,7 +1675,7 @@ func (dvd DefaultValueDecoders) decodeDefault(dc DecodeContext, vr bsonrw.ValueR
idx := 0
for {
vr, err := ar.ReadValue()
if err == bsonrw.ErrEOA {
if errors.Is(err, bsonrw.ErrEOA) {
break
}
if err != nil {
@ -1787,7 +1787,7 @@ func (DefaultValueDecoders) decodeElemsFromDocumentReader(dc DecodeContext, dr b
elems := make([]reflect.Value, 0)
for {
key, vr, err := dr.ReadElement()
if err == bsonrw.ErrEOD {
if errors.Is(err, bsonrw.ErrEOD) {
break
}
if err != nil {
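
These hunks replace direct sentinel comparisons with errors.Is, which keeps matching even if a caller wraps the sentinel with %w. A minimal illustration (`errEOD` here is a stand-in, not the driver's own sentinel):

package main

import (
	"errors"
	"fmt"
)

var errEOD = errors.New("end of document")

func main() {
	wrapped := fmt.Errorf("reading element: %w", errEOD)
	fmt.Println(wrapped == errEOD)          // false: the wrapper is a different value
	fmt.Println(errors.Is(wrapped, errEOD)) // true: errors.Is unwraps
}
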

View File

@ -343,7 +343,7 @@ func (dve DefaultValueEncoders) mapEncodeValue(ec EncodeContext, dw bsonrw.Docum
}
currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.MapIndex(key))
if lookupErr != nil && lookupErr != errInvalidValue {
if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
return lookupErr
}
@ -352,7 +352,7 @@ func (dve DefaultValueEncoders) mapEncodeValue(ec EncodeContext, dw bsonrw.Docum
return err
}
if lookupErr == errInvalidValue {
if errors.Is(lookupErr, errInvalidValue) {
err = vw.WriteNull()
if err != nil {
return err
@ -418,7 +418,7 @@ func (dve DefaultValueEncoders) ArrayEncodeValue(ec EncodeContext, vw bsonrw.Val
for idx := 0; idx < val.Len(); idx++ {
currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
if lookupErr != nil && lookupErr != errInvalidValue {
if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
return lookupErr
}
@ -427,7 +427,7 @@ func (dve DefaultValueEncoders) ArrayEncodeValue(ec EncodeContext, vw bsonrw.Val
return err
}
if lookupErr == errInvalidValue {
if errors.Is(lookupErr, errInvalidValue) {
err = vw.WriteNull()
if err != nil {
return err
@ -487,7 +487,7 @@ func (dve DefaultValueEncoders) SliceEncodeValue(ec EncodeContext, vw bsonrw.Val
for idx := 0; idx < val.Len(); idx++ {
currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
if lookupErr != nil && lookupErr != errInvalidValue {
if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
return lookupErr
}
@ -496,7 +496,7 @@ func (dve DefaultValueEncoders) SliceEncodeValue(ec EncodeContext, vw bsonrw.Val
return err
}
if lookupErr == errInvalidValue {
if errors.Is(lookupErr, errInvalidValue) {
err = vw.WriteNull()
if err != nil {
return err

View File

@ -17,13 +17,27 @@ import (
// EmptyInterfaceCodec is the Codec used for interface{} values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// EmptyInterfaceCodec registered.
// Deprecated: EmptyInterfaceCodec will not be directly configurable in Go
// Driver 2.0. To configure the empty interface encode and decode behavior, use
// the configuration methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the empty interface
// encode and decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to unmarshal BSON binary field
// values as a Go byte slice, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// BinaryAsSlice: true,
// })
//
// See the deprecation notice for each field in EmptyInterfaceCodec for the
// corresponding settings.
type EmptyInterfaceCodec struct {
// DecodeBinaryAsSlice causes DecodeValue to unmarshal BSON binary field values that are the
// "Generic" or "Old" BSON binary subtype as a Go byte slice instead of a primitive.Binary.
//
// Deprecated: Use bson.Decoder.BinaryAsSlice instead.
// Deprecated: Use bson.Decoder.BinaryAsSlice or options.BSONOptions.BinaryAsSlice instead.
DecodeBinaryAsSlice bool
}
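
A hypothetical sketch of the per-Decoder route the deprecation notice points to, as an alternative to the ClientOptions example above (BinaryAsSlice mirrors options.BSONOptions.BinaryAsSlice; the function name is invented):

package demo

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
)

func decodeBinaryAsSlice(raw []byte, out interface{}) error {
	dec, err := bson.NewDecoder(bsonrw.NewBSONDocumentReader(raw))
	if err != nil {
		return err
	}
	// Generic/old-subtype BSON binary values decode into []byte instead of primitive.Binary.
	dec.BinaryAsSlice()
	return dec.Decode(out)
}
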
@ -38,8 +52,8 @@ var (
// NewEmptyInterfaceCodec returns a EmptyInterfaceCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// EmptyInterfaceCodec registered.
// Deprecated: NewEmptyInterfaceCodec will not be available in Go Driver 2.0. See
// [EmptyInterfaceCodec] for more details.
func NewEmptyInterfaceCodec(opts ...*bsonoptions.EmptyInterfaceCodecOptions) *EmptyInterfaceCodec {
interfaceOpt := bsonoptions.MergeEmptyInterfaceCodecOptions(opts...)

View File

@ -8,6 +8,7 @@ package bsoncodec
import (
"encoding"
"errors"
"fmt"
"reflect"
"strconv"
@ -21,25 +22,40 @@ var defaultMapCodec = NewMapCodec()
// MapCodec is the Codec used for map values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// MapCodec registered.
// Deprecated: MapCodec will not be directly configurable in Go Driver 2.0. To
// configure the map encode and decode behavior, use the configuration methods
// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the map encode and
// decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to marshal nil Go maps as empty BSON
// documents, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// NilMapAsEmpty: true,
// })
//
// See the deprecation notice for each field in MapCodec for the corresponding
// settings.
type MapCodec struct {
// DecodeZerosMap causes DecodeValue to delete any existing values from Go maps in the destination
// value passed to Decode before unmarshaling BSON documents into them.
//
// Deprecated: Use bson.Decoder.ZeroMaps instead.
// Deprecated: Use bson.Decoder.ZeroMaps or options.BSONOptions.ZeroMaps instead.
DecodeZerosMap bool
// EncodeNilAsEmpty causes EncodeValue to marshal nil Go maps as empty BSON documents instead of
// BSON null.
//
// Deprecated: Use bson.Encoder.NilMapAsEmpty instead.
// Deprecated: Use bson.Encoder.NilMapAsEmpty or options.BSONOptions.NilMapAsEmpty instead.
EncodeNilAsEmpty bool
// EncodeKeysWithStringer causes the Encoder to convert Go map keys to BSON document field name
// strings using fmt.Sprintf() instead of the default string conversion logic.
//
// Deprecated: Use bson.Encoder.StringifyMapKeysWithFmt instead.
// Deprecated: Use bson.Encoder.StringifyMapKeysWithFmt or
// options.BSONOptions.StringifyMapKeysWithFmt instead.
EncodeKeysWithStringer bool
}
@ -61,8 +77,8 @@ type KeyUnmarshaler interface {
// NewMapCodec returns a MapCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// MapCodec registered.
// Deprecated: NewMapCodec will not be available in Go Driver 2.0. See
// [MapCodec] for more details.
func NewMapCodec(opts ...*bsonoptions.MapCodecOptions) *MapCodec {
mapOpt := bsonoptions.MergeMapCodecOptions(opts...)
@ -128,7 +144,7 @@ func (mc *MapCodec) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, v
}
currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.MapIndex(key))
if lookupErr != nil && lookupErr != errInvalidValue {
if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
return lookupErr
}
@ -137,7 +153,7 @@ func (mc *MapCodec) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, v
return err
}
if lookupErr == errInvalidValue {
if errors.Is(lookupErr, errInvalidValue) {
err = vw.WriteNull()
if err != nil {
return err
@ -200,7 +216,7 @@ func (mc *MapCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val ref
for {
key, vr, err := dr.ReadElement()
if err == bsonrw.ErrEOD {
if errors.Is(err, bsonrw.ErrEOD) {
break
}
if err != nil {
@ -313,7 +329,7 @@ func (mc *MapCodec) decodeKey(key string, keyType reflect.Type) (reflect.Value,
if mc.EncodeKeysWithStringer {
parsed, err := strconv.ParseFloat(key, 64)
if err != nil {
return keyVal, fmt.Errorf("Map key is defined to be a decimal type (%v) but got error %v", keyType.Kind(), err)
return keyVal, fmt.Errorf("Map key is defined to be a decimal type (%v) but got error %w", keyType.Kind(), err)
}
keyVal = reflect.ValueOf(parsed)
break

View File

@ -18,8 +18,16 @@ var _ ValueDecoder = &PointerCodec{}
// PointerCodec is the Codec used for pointers.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// PointerCodec registered.
// Deprecated: PointerCodec will not be directly accessible in Go Driver 2.0. To
// override the default pointer encode and decode behavior, create a new registry
// with [go.mongodb.org/mongo-driver/bson.NewRegistry] and register a new
// encoder and decoder for pointers.
//
// For example,
//
// reg := bson.NewRegistry()
// reg.RegisterKindEncoder(reflect.Ptr, myPointerEncoder)
// reg.RegisterKindDecoder(reflect.Ptr, myPointerDecoder)
type PointerCodec struct {
ecache typeEncoderCache
dcache typeDecoderCache
@ -27,8 +35,8 @@ type PointerCodec struct {
// NewPointerCodec returns a PointerCodec that has been initialized.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// PointerCodec registered.
// Deprecated: NewPointerCodec will not be available in Go Driver 2.0. See
// [PointerCodec] for more details.
func NewPointerCodec() *PointerCodec {
return &PointerCodec{}
}

View File

@ -7,6 +7,7 @@
package bsoncodec
import (
"errors"
"fmt"
"reflect"
@ -20,8 +21,22 @@ var defaultSliceCodec = NewSliceCodec()
// SliceCodec is the Codec used for slice values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// SliceCodec registered.
// Deprecated: SliceCodec will not be directly configurable in Go Driver 2.0. To
// configure the slice encode and decode behavior, use the configuration methods
// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the slice encode and
// decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to marshal nil Go slices as empty
// BSON arrays, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// NilSliceAsEmpty: true,
// })
//
// See the deprecation notice for each field in SliceCodec for the corresponding
// settings.
type SliceCodec struct {
// EncodeNilAsEmpty causes EncodeValue to marshal nil Go slices as empty BSON arrays instead of
// BSON null.
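A slightly fuller, hedged version of that client-level example (the URI and the context handling are placeholders):

	ctx := context.Background()
	opt := options.Client().
		ApplyURI("mongodb://localhost:27017").
		SetBSONOptions(&options.BSONOptions{
			NilSliceAsEmpty: true, // marshal nil Go slices as [] rather than null
		})
	client, err := mongo.Connect(ctx, opt)
	if err != nil {
		panic(err)
	}
	defer func() { _ = client.Disconnect(ctx) }()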
@ -32,8 +47,8 @@ type SliceCodec struct {
// NewSliceCodec returns a MapCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// SliceCodec registered.
// Deprecated: NewSliceCodec will not be available in Go Driver 2.0. See
// [SliceCodec] for more details.
func NewSliceCodec(opts ...*bsonoptions.SliceCodecOptions) *SliceCodec {
sliceOpt := bsonoptions.MergeSliceCodecOptions(opts...)
@ -93,7 +108,7 @@ func (sc SliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val re
for idx := 0; idx < val.Len(); idx++ {
currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.Index(idx))
if lookupErr != nil && lookupErr != errInvalidValue {
if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
return lookupErr
}
@ -102,7 +117,7 @@ func (sc SliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val re
return err
}
if lookupErr == errInvalidValue {
if errors.Is(lookupErr, errInvalidValue) {
err = vw.WriteNull()
if err != nil {
return err

View File

@ -17,8 +17,16 @@ import (
// StringCodec is the Codec used for string values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// StringCodec registered.
// Deprecated: StringCodec will not be directly accessible in Go Driver 2.0. To
// override the default string encode and decode behavior, create a new registry
// with [go.mongodb.org/mongo-driver/bson.NewRegistry] and register a new
// encoder and decoder for strings.
//
// For example,
//
// reg := bson.NewRegistry()
// reg.RegisterKindEncoder(reflect.String, myStringEncoder)
// reg.RegisterKindDecoder(reflect.String, myStringDecoder)
type StringCodec struct {
// DecodeObjectIDAsHex specifies if object IDs should be decoded as their hex representation.
// If false, a string made from the raw object ID bytes will be used. Defaults to true.
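To see the DecodeObjectIDAsHex default in action, a small hedged sketch (the field and variable names are arbitrary):

	raw, err := bson.Marshal(bson.D{{Key: "id", Value: primitive.NewObjectID()}})
	if err != nil {
		panic(err)
	}
	var out struct {
		ID string `bson:"id"`
	}
	if err := bson.Unmarshal(raw, &out); err != nil {
		panic(err)
	}
	// out.ID holds the 24-character hex form of the ObjectID because
	// DecodeObjectIDAsHex defaults to true.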
@ -38,8 +46,8 @@ var (
// NewStringCodec returns a StringCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// StringCodec registered.
// Deprecated: NewStringCodec will not be available in Go Driver 2.0. See
// [StringCodec] for more details.
func NewStringCodec(opts ...*bsonoptions.StringCodecOptions) *StringCodec {
stringOpt := bsonoptions.MergeStringCodecOptions(opts...)
return &StringCodec{*stringOpt.DecodeObjectIDAsHex}

View File

@ -60,8 +60,22 @@ type Zeroer interface {
// StructCodec is the Codec used for struct values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// StructCodec registered.
// Deprecated: StructCodec will not be directly configurable in Go Driver 2.0.
// To configure the struct encode and decode behavior, use the configuration
// methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the struct encode
// and decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to omit zero-value structs when
// using the "omitempty" struct tag, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// OmitZeroStruct: true,
// })
//
// See the deprecation notice for each field in StructCodec for the corresponding
// settings.
type StructCodec struct {
cache sync.Map // map[reflect.Type]*structDescription
parser StructTagParser
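For instance, a hedged sketch of what OmitZeroStruct changes (the types are illustrative):

	type Address struct {
		City string
	}
	type Person struct {
		Name    string  `bson:"name,omitempty"`
		Address Address `bson:"address,omitempty"`
	}

	doc, _ := bson.Marshal(Person{Name: "Ada"})
	// By default doc is {"name": "Ada", "address": {}} because plain structs
	// are never treated as empty; with OmitZeroStruct enabled (on the Encoder
	// or via options.BSONOptions), the zero-value Address field is omitted.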
@ -69,7 +83,7 @@ type StructCodec struct {
// DecodeZeroStruct causes DecodeValue to delete any existing values from Go structs in the
// destination value passed to Decode before unmarshaling BSON documents into them.
//
// Deprecated: Use bson.Decoder.ZeroStructs instead.
// Deprecated: Use bson.Decoder.ZeroStructs or options.BSONOptions.ZeroStructs instead.
DecodeZeroStruct bool
// DecodeDeepZeroInline causes DecodeValue to delete any existing values from Go structs in the
@ -82,7 +96,7 @@ type StructCodec struct {
// MyStruct{}) as empty and omit it from the marshaled BSON when the "omitempty" struct tag
// option is set.
//
// Deprecated: Use bson.Encoder.OmitZeroStruct instead.
// Deprecated: Use bson.Encoder.OmitZeroStruct or options.BSONOptions.OmitZeroStruct instead.
EncodeOmitDefaultStruct bool
// AllowUnexportedFields allows encoding and decoding values from un-exported struct fields.
@ -95,7 +109,8 @@ type StructCodec struct {
// a duplicate field in the marshaled BSON when the "inline" struct tag option is set. The
// default value is true.
//
// Deprecated: Use bson.Encoder.ErrorOnInlineDuplicates instead.
// Deprecated: Use bson.Encoder.ErrorOnInlineDuplicates or
// options.BSONOptions.ErrorOnInlineDuplicates instead.
OverwriteDuplicatedInlinedFields bool
}
@ -104,8 +119,8 @@ var _ ValueDecoder = &StructCodec{}
// NewStructCodec returns a StructCodec that uses p for struct tag parsing.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// StructCodec registered.
// Deprecated: NewStructCodec will not be available in Go Driver 2.0. See
// [StructCodec] for more details.
func NewStructCodec(p StructTagParser, opts ...*bsonoptions.StructCodecOptions) (*StructCodec, error) {
if p == nil {
return nil, errors.New("a StructTagParser must be provided to NewStructCodec")
@ -164,11 +179,11 @@ func (sc *StructCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val
desc.encoder, rv, err = defaultValueEncoders.lookupElementEncoder(ec, desc.encoder, rv)
if err != nil && err != errInvalidValue {
if err != nil && !errors.Is(err, errInvalidValue) {
return err
}
if err == errInvalidValue {
if errors.Is(err, errInvalidValue) {
if desc.omitEmpty {
continue
}
@ -189,17 +204,17 @@ func (sc *StructCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val
encoder := desc.encoder
var zero bool
var empty bool
if cz, ok := encoder.(CodecZeroer); ok {
zero = cz.IsTypeZero(rv.Interface())
empty = cz.IsTypeZero(rv.Interface())
} else if rv.Kind() == reflect.Interface {
// isZero will not treat an interface rv as an interface, so we need to check for the
// zero interface separately.
zero = rv.IsNil()
// isEmpty will not treat an interface rv as an interface, so we need to check for the
// nil interface separately.
empty = rv.IsNil()
} else {
zero = isZero(rv, sc.EncodeOmitDefaultStruct || ec.omitZeroStruct)
empty = isEmpty(rv, sc.EncodeOmitDefaultStruct || ec.omitZeroStruct)
}
if desc.omitEmpty && zero {
if desc.omitEmpty && empty {
continue
}
@ -239,8 +254,8 @@ func (sc *StructCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val
}
func newDecodeError(key string, original error) error {
de, ok := original.(*DecodeError)
if !ok {
var de *DecodeError
if !errors.As(original, &de) {
return &DecodeError{
keys: []string{key},
wrapped: original,
@ -308,7 +323,7 @@ func (sc *StructCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val
for {
name, vr, err := dr.ReadElement()
if err == bsonrw.ErrEOD {
if errors.Is(err, bsonrw.ErrEOD) {
break
}
if err != nil {
@ -391,12 +406,15 @@ func (sc *StructCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val
return nil
}
func isZero(v reflect.Value, omitZeroStruct bool) bool {
func isEmpty(v reflect.Value, omitZeroStruct bool) bool {
kind := v.Kind()
if (kind != reflect.Ptr || !v.IsNil()) && v.Type().Implements(tZeroer) {
return v.Interface().(Zeroer).IsZero()
}
if kind == reflect.Struct {
switch kind {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
case reflect.Struct:
if !omitZeroStruct {
return false
}
@ -410,7 +428,7 @@ func isZero(v reflect.Value, omitZeroStruct bool) bool {
if ff.PkgPath != "" && !ff.Anonymous {
continue // Private field
}
if !isZero(v.Field(i), omitZeroStruct) {
if !isEmpty(v.Field(i), omitZeroStruct) {
return false
}
}

View File

@ -23,12 +23,26 @@ const (
// TimeCodec is the Codec used for time.Time values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// TimeCodec registered.
// Deprecated: TimeCodec will not be directly configurable in Go Driver 2.0.
// To configure the time.Time encode and decode behavior, use the configuration
// methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the time.Time encode
// and decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to unmarshal BSON datetime values in the local timezone, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// UseLocalTimeZone: true,
// })
//
// See the deprecation notice for each field in TimeCodec for the corresponding
// settings.
type TimeCodec struct {
// UseLocalTimeZone specifies if we should decode into the local time zone. Defaults to false.
//
// Deprecated: Use bson.Decoder.UseLocalTimeZone instead.
// Deprecated: Use bson.Decoder.UseLocalTimeZone or options.BSONOptions.UseLocalTimeZone
// instead.
UseLocalTimeZone bool
}
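A hedged sketch of the Decoder-level option named above, assuming bson.NewDecoder and bsonrw.NewBSONDocumentReader (plus the time import); error handling is reduced to panics for brevity:

	raw, err := bson.Marshal(bson.D{{Key: "at", Value: time.Now()}})
	if err != nil {
		panic(err)
	}
	dec, err := bson.NewDecoder(bsonrw.NewBSONDocumentReader(raw))
	if err != nil {
		panic(err)
	}
	dec.UseLocalTimeZone() // decode BSON datetimes into time.Local instead of UTC
	var out struct {
		At time.Time `bson:"at"`
	}
	if err := dec.Decode(&out); err != nil {
		panic(err)
	}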
@ -42,8 +56,8 @@ var (
// NewTimeCodec returns a TimeCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// TimeCodec registered.
// Deprecated: NewTimeCodec will not be available in Go Driver 2.0. See
// [TimeCodec] for more details.
func NewTimeCodec(opts ...*bsonoptions.TimeCodecOptions) *TimeCodec {
timeOpt := bsonoptions.MergeTimeCodecOptions(opts...)

View File

@ -18,13 +18,27 @@ import (
// UIntCodec is the Codec used for uint values.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// UIntCodec registered.
// Deprecated: UIntCodec will not be directly configurable in Go Driver 2.0. To
// configure the uint encode and decode behavior, use the configuration methods
// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the uint encode and
// decode behavior for a mongo.Client, use
// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
//
// For example, to configure a mongo.Client to marshal Go uint values as the
// minimum BSON int size that can represent the value, use:
//
// opt := options.Client().SetBSONOptions(&options.BSONOptions{
// IntMinSize: true,
// })
//
// See the deprecation notice for each field in UIntCodec for the corresponding
// settings.
type UIntCodec struct {
// EncodeToMinSize causes EncodeValue to marshal Go uint values (excluding uint64) as the
// minimum BSON int size (either 32-bit or 64-bit) that can represent the integer value.
//
// Deprecated: Use bson.Encoder.IntMinSize instead.
// Deprecated: Use bson.Encoder.IntMinSize or options.BSONOptions.IntMinSize instead.
EncodeToMinSize bool
}
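To make the size behavior concrete, a hedged sketch using the equivalent minsize struct tag documented in the bson package docs (the struct is illustrative):

	type Counts struct {
		Small uint32 `bson:"small,minsize"` // stored as a BSON int32 when the value fits
		Large uint32 `bson:"large"`         // stored as a BSON int64 by default
	}

	raw, _ := bson.Marshal(Counts{Small: 7, Large: 7})
	// Inspecting the raw document shows the two fields end up as different
	// BSON integer types.
	fmt.Println(bson.Raw(raw).Lookup("small").Type, bson.Raw(raw).Lookup("large").Type)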
@ -38,8 +52,8 @@ var (
// NewUIntCodec returns a UIntCodec with options opts.
//
// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with the
// UIntCodec registered.
// Deprecated: NewUIntCodec will not be available in Go Driver 2.0. See
// [UIntCodec] for more details.
func NewUIntCodec(opts ...*bsonoptions.UIntCodecOptions) *UIntCodec {
uintOpt := bsonoptions.MergeUIntCodecOptions(opts...)

View File

@ -7,6 +7,7 @@
package bsonrw
import (
"errors"
"fmt"
"io"
@ -442,7 +443,7 @@ func (c Copier) copyArray(dst ValueWriter, src ValueReader) error {
for {
vr, err := ar.ReadValue()
if err == ErrEOA {
if errors.Is(err, ErrEOA) {
break
}
if err != nil {
@ -466,7 +467,7 @@ func (c Copier) copyArray(dst ValueWriter, src ValueReader) error {
func (c Copier) copyDocumentCore(dw DocumentWriter, dr DocumentReader) error {
for {
key, vr, err := dr.ReadElement()
if err == ErrEOD {
if errors.Is(err, ErrEOD) {
break
}
if err != nil {

View File

@ -313,7 +313,7 @@ func (ejp *extJSONParser) readValue(t bsontype.Type) (*extJSONValue, error) {
// convert hex to bytes
bytes, err := hex.DecodeString(uuidNoHyphens)
if err != nil {
return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %v", err)
return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %w", err)
}
ejp.advanceState()

View File

@ -7,6 +7,7 @@
package bsonrw
import (
"errors"
"fmt"
"io"
"sync"
@ -613,7 +614,7 @@ func (ejvr *extJSONValueReader) ReadElement() (string, ValueReader, error) {
name, t, err := ejvr.p.readKey()
if err != nil {
if err == ErrEOD {
if errors.Is(err, ErrEOD) {
if ejvr.stack[ejvr.frame].mode == mCodeWithScope {
_, err := ejvr.p.peekType()
if err != nil {
@ -640,7 +641,7 @@ func (ejvr *extJSONValueReader) ReadValue() (ValueReader, error) {
t, err := ejvr.p.peekType()
if err != nil {
if err == ErrEOA {
if errors.Is(err, ErrEOA) {
ejvr.pop()
}

View File

@ -58,7 +58,7 @@ func (js *jsonScanner) nextToken() (*jsonToken, error) {
c, err = js.readNextByte()
}
if err == io.EOF {
if errors.Is(err, io.EOF) {
return &jsonToken{t: jttEOF}, nil
} else if err != nil {
return nil, err
@ -198,7 +198,7 @@ func (js *jsonScanner) scanString() (*jsonToken, error) {
for {
c, err = js.readNextByte()
if err != nil {
if err == io.EOF {
if errors.Is(err, io.EOF) {
return nil, errors.New("end of input in JSON string")
}
return nil, err
@ -209,7 +209,7 @@ func (js *jsonScanner) scanString() (*jsonToken, error) {
case '\\':
c, err = js.readNextByte()
if err != nil {
if err == io.EOF {
if errors.Is(err, io.EOF) {
return nil, errors.New("end of input in JSON string")
}
return nil, err
@ -248,7 +248,7 @@ func (js *jsonScanner) scanString() (*jsonToken, error) {
if utf16.IsSurrogate(rn) {
c, err = js.readNextByte()
if err != nil {
if err == io.EOF {
if errors.Is(err, io.EOF) {
return nil, errors.New("end of input in JSON string")
}
return nil, err
@ -264,7 +264,7 @@ func (js *jsonScanner) scanString() (*jsonToken, error) {
c, err = js.readNextByte()
if err != nil {
if err == io.EOF {
if errors.Is(err, io.EOF) {
return nil, errors.New("end of input in JSON string")
}
return nil, err
@ -325,17 +325,17 @@ func (js *jsonScanner) scanLiteral(first byte) (*jsonToken, error) {
c5, err := js.readNextByte()
if bytes.Equal([]byte("true"), lit) && (isValueTerminator(c5) || err == io.EOF) {
if bytes.Equal([]byte("true"), lit) && (isValueTerminator(c5) || errors.Is(err, io.EOF)) {
js.pos = int(math.Max(0, float64(js.pos-1)))
return &jsonToken{t: jttBool, v: true, p: p}, nil
} else if bytes.Equal([]byte("null"), lit) && (isValueTerminator(c5) || err == io.EOF) {
} else if bytes.Equal([]byte("null"), lit) && (isValueTerminator(c5) || errors.Is(err, io.EOF)) {
js.pos = int(math.Max(0, float64(js.pos-1)))
return &jsonToken{t: jttNull, v: nil, p: p}, nil
} else if bytes.Equal([]byte("fals"), lit) {
if c5 == 'e' {
c5, err = js.readNextByte()
if isValueTerminator(c5) || err == io.EOF {
if isValueTerminator(c5) || errors.Is(err, io.EOF) {
js.pos = int(math.Max(0, float64(js.pos-1)))
return &jsonToken{t: jttBool, v: false, p: p}, nil
}
@ -384,7 +384,7 @@ func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
for {
c, err = js.readNextByte()
if err != nil && err != io.EOF {
if err != nil && !errors.Is(err, io.EOF) {
return nil, err
}
@ -413,7 +413,7 @@ func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
case '}', ']', ',':
s = nssDone
default:
if isWhiteSpace(c) || err == io.EOF {
if isWhiteSpace(c) || errors.Is(err, io.EOF) {
s = nssDone
} else {
s = nssInvalid
@ -430,7 +430,7 @@ func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
case '}', ']', ',':
s = nssDone
default:
if isWhiteSpace(c) || err == io.EOF {
if isWhiteSpace(c) || errors.Is(err, io.EOF) {
s = nssDone
} else if isDigit(c) {
s = nssSawIntegerDigits
@ -455,7 +455,7 @@ func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
case '}', ']', ',':
s = nssDone
default:
if isWhiteSpace(c) || err == io.EOF {
if isWhiteSpace(c) || errors.Is(err, io.EOF) {
s = nssDone
} else if isDigit(c) {
s = nssSawFractionDigits
@ -490,7 +490,7 @@ func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
case '}', ']', ',':
s = nssDone
default:
if isWhiteSpace(c) || err == io.EOF {
if isWhiteSpace(c) || errors.Is(err, io.EOF) {
s = nssDone
} else if isDigit(c) {
s = nssSawExponentDigits

View File

@ -6,9 +6,9 @@
// Package bson is a library for reading, writing, and manipulating BSON. BSON is a binary serialization format used to
// store documents and make remote procedure calls in MongoDB. The BSON specification is located at https://bsonspec.org.
// The BSON library handles marshalling and unmarshalling of values through a configurable codec system. For a description
// of the codec system and examples of registering custom codecs, see the bsoncodec package. For additional information and
// usage examples, check out the [Work with BSON] page in the Go Driver docs site.
// The BSON library handles marshaling and unmarshaling of values through a configurable codec system. For a description
// of the codec system and examples of registering custom codecs, see the bsoncodec package. For additional information
// and usage examples, check out the [Work with BSON] page in the Go Driver docs site.
//
// # Raw BSON
//
@ -38,7 +38,7 @@
// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
//
// When decoding BSON to a D or M, the following type mappings apply when unmarshalling:
// When decoding BSON to a D or M, the following type mappings apply when unmarshaling:
//
// 1. BSON int32 unmarshals to an int32.
// 2. BSON int64 unmarshals to an int64.
@ -62,83 +62,78 @@
// 20. BSON DBPointer unmarshals to a primitive.DBPointer.
// 21. BSON symbol unmarshals to a primitive.Symbol.
//
// The above mappings also apply when marshalling a D or M to BSON. Some other useful marshalling mappings are:
// The above mappings also apply when marshaling a D or M to BSON. Some other useful marshaling mappings are:
//
// 1. time.Time marshals to a BSON datetime.
// 2. int8, int16, and int32 marshal to a BSON int32.
// 3. int marshals to a BSON int32 if the value is between math.MinInt32 and math.MaxInt32, inclusive, and a BSON int64
// otherwise.
// 4. int64 marshals to BSON int64.
// 4. int64 marshals to BSON int64 (unless [Encoder.IntMinSize] is set).
// 5. uint8 and uint16 marshal to a BSON int32.
// 6. uint, uint32, and uint64 marshal to a BSON int32 if the value is between math.MinInt32 and math.MaxInt32,
// inclusive, and BSON int64 otherwise.
// 7. BSON null and undefined values will unmarshal into the zero value of a field (e.g. unmarshalling a BSON null or
// 6. uint, uint32, and uint64 marshal to a BSON int64 (unless [Encoder.IntMinSize] is set).
// 7. BSON null and undefined values will unmarshal into the zero value of a field (e.g. unmarshaling a BSON null or
// undefined value into a string will yield the empty string.).
//
// # Structs
//
// Structs can be marshalled/unmarshalled to/from BSON or Extended JSON. When transforming structs to/from BSON or Extended
// Structs can be marshaled/unmarshaled to/from BSON or Extended JSON. When transforming structs to/from BSON or Extended
// JSON, the following rules apply:
//
// 1. Only exported fields in structs will be marshalled or unmarshalled.
// 1. Only exported fields in structs will be marshaled or unmarshaled.
//
// 2. When marshalling a struct, each field will be lowercased to generate the key for the corresponding BSON element.
// 2. When marshaling a struct, each field will be lowercased to generate the key for the corresponding BSON element.
// For example, a struct field named "Foo" will generate key "foo". This can be overridden via a struct tag (e.g.
// `bson:"fooField"` to generate key "fooField" instead).
//
// 3. An embedded struct field is marshalled as a subdocument. The key will be the lowercased name of the field's type.
// 3. An embedded struct field is marshaled as a subdocument. The key will be the lowercased name of the field's type.
//
// 4. A pointer field is marshalled as the underlying type if the pointer is non-nil. If the pointer is nil, it is
// marshalled as a BSON null value.
// 4. A pointer field is marshaled as the underlying type if the pointer is non-nil. If the pointer is nil, it is
// marshaled as a BSON null value.
//
// 5. When unmarshalling, a field of type interface{} will follow the D/M type mappings listed above. BSON documents
// unmarshalled into an interface{} field will be unmarshalled as a D.
// 5. When unmarshaling, a field of type interface{} will follow the D/M type mappings listed above. BSON documents
// unmarshaled into an interface{} field will be unmarshaled as a D.
//
// The encoding of each struct field can be customized by the "bson" struct tag.
//
// This tag behavior is configurable, and different struct tag behavior can be configured by initializing a new
// bsoncodec.StructCodec with the desired tag parser and registering that StructCodec onto the Registry. By default, JSON tags
// are not honored, but that can be enabled by creating a StructCodec with JSONFallbackStructTagParser, like below:
// bsoncodec.StructCodec with the desired tag parser and registering that StructCodec onto the Registry. By default, JSON
// tags are not honored, but that can be enabled by creating a StructCodec with JSONFallbackStructTagParser, like below:
//
// Example:
//
// structcodec, _ := bsoncodec.NewStructCodec(bsoncodec.JSONFallbackStructTagParser)
//
// The bson tag gives the name of the field, possibly followed by a comma-separated list of options.
// The name may be empty in order to specify options without overriding the default field name. The following options can be used
// to configure behavior:
// The name may be empty in order to specify options without overriding the default field name. The following options can
// be used to configure behavior:
//
// 1. omitempty: If the omitempty struct tag is specified on a field, the field will not be marshalled if it is set to
// the zero value. Fields with language primitive types such as integers, booleans, and strings are considered empty if
// their value is equal to the zero value for the type (i.e. 0 for integers, false for booleans, and "" for strings).
// Slices, maps, and arrays are considered empty if they are of length zero. Interfaces and pointers are considered
// empty if their value is nil. By default, structs are only considered empty if the struct type implements the
// bsoncodec.Zeroer interface and the IsZero method returns true. Struct fields whose types do not implement Zeroer are
// never considered empty and will be marshalled as embedded documents.
// 1. omitempty: If the omitempty struct tag is specified on a field, the field will be omitted from the marshaling if
// the field has an empty value, defined as false, 0, a nil pointer, a nil interface value, and any empty array,
// slice, map, or string.
// NOTE: It is recommended that this tag be used for all slice and map fields.
//
// 2. minsize: If the minsize struct tag is specified on a field of type int64, uint, uint32, or uint64 and the value of
// the field can fit in a signed int32, the field will be serialized as a BSON int32 rather than a BSON int64. For other
// types, this tag is ignored.
// the field can fit in a signed int32, the field will be serialized as a BSON int32 rather than a BSON int64. For
// other types, this tag is ignored.
//
// 3. truncate: If the truncate struct tag is specified on a field with a non-float numeric type, BSON doubles unmarshalled
// into that field will be truncated at the decimal point. For example, if 3.14 is unmarshalled into a field of type int,
// it will be unmarshalled as 3. If this tag is not specified, the decoder will throw an error if the value cannot be
// decoded without losing precision. For float64 or non-numeric types, this tag is ignored.
// 3. truncate: If the truncate struct tag is specified on a field with a non-float numeric type, BSON doubles
// unmarshaled into that field will be truncated at the decimal point. For example, if 3.14 is unmarshaled into a
// field of type int, it will be unmarshaled as 3. If this tag is not specified, the decoder will throw an error if
// the value cannot be decoded without losing precision. For float64 or non-numeric types, this tag is ignored.
//
// 4. inline: If the inline struct tag is specified for a struct or map field, the field will be "flattened" when
// marshalling and "un-flattened" when unmarshalling. This means that all of the fields in that struct/map will be
// pulled up one level and will become top-level fields rather than being fields in a nested document. For example, if a
// map field named "Map" with value map[string]interface{}{"foo": "bar"} is inlined, the resulting document will be
// {"foo": "bar"} instead of {"map": {"foo": "bar"}}. There can only be one inlined map field in a struct. If there are
// duplicated fields in the resulting document when an inlined struct is marshalled, the inlined field will be overwritten.
// If there are duplicated fields in the resulting document when an inlined map is marshalled, an error will be returned.
// This tag can be used with fields that are pointers to structs. If an inlined pointer field is nil, it will not be
// marshalled. For fields that are not maps or structs, this tag is ignored.
// marshaling and "un-flattened" when unmarshaling. This means that all of the fields in that struct/map will be
// pulled up one level and will become top-level fields rather than being fields in a nested document. For example,
// if a map field named "Map" with value map[string]interface{}{"foo": "bar"} is inlined, the resulting document will
// be {"foo": "bar"} instead of {"map": {"foo": "bar"}}. There can only be one inlined map field in a struct. If
// there are duplicated fields in the resulting document when an inlined struct is marshaled, the inlined field will
// be overwritten. If there are duplicated fields in the resulting document when an inlined map is marshaled, an
// error will be returned. This tag can be used with fields that are pointers to structs. If an inlined pointer field
// is nil, it will not be marshaled. For fields that are not maps or structs, this tag is ignored.
//
// # Marshalling and Unmarshalling
// # Marshaling and Unmarshaling
//
// Manually marshalling and unmarshalling can be done with the Marshal and Unmarshal family of functions.
// Manually marshaling and unmarshaling can be done with the Marshal and Unmarshal family of functions.
//
// [Work with BSON]: https://www.mongodb.com/docs/drivers/go/current/fundamentals/bson/
package bson
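As a compact, hedged illustration of the tag rules and the Marshal/Unmarshal functions described above (the struct and values are arbitrary):

	package main

	import (
		"fmt"

		"go.mongodb.org/mongo-driver/bson"
	)

	type Item struct {
		Name     string                 `bson:"name"`
		Tags     []string               `bson:"tags,omitempty"`   // omitted when nil or empty
		Quantity int64                  `bson:"quantity,minsize"` // int32 on the wire when it fits
		Price    int                    `bson:"price,truncate"`   // BSON doubles are truncated on decode
		Extra    map[string]interface{} `bson:",inline"`          // flattened into the top-level document
	}

	func main() {
		raw, err := bson.Marshal(Item{Name: "widget", Quantity: 3, Extra: map[string]interface{}{"color": "red"}})
		if err != nil {
			panic(err)
		}
		var out Item
		if err := bson.Unmarshal(raw, &out); err != nil {
			panic(err)
		}
		fmt.Printf("%+v\n", out)
	}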

View File

@ -164,9 +164,6 @@ func (d Decimal128) BigInt() (*big.Int, int, error) {
// Would be handled by the logic below, but that's trivial and common.
if high == 0 && low == 0 && exp == 0 {
if posSign {
return new(big.Int), 0, nil
}
return new(big.Int), 0, nil
}

View File

@ -183,7 +183,7 @@ func processUniqueBytes() [5]byte {
var b [5]byte
_, err := io.ReadFull(rand.Reader, b[:])
if err != nil {
panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %v", err))
panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %w", err))
}
return b
@ -193,7 +193,7 @@ func readRandomUint32() uint32 {
var b [4]byte
_, err := io.ReadFull(rand.Reader, b[:])
if err != nil {
panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %v", err))
panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %w", err))
}
return (uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24)

View File

@ -120,8 +120,9 @@ type PoolEvent struct {
Reason string `json:"reason"`
// ServiceID is only set if the Type is PoolCleared and the server is deployed behind a load balancer. This field
// can be used to distinguish between individual servers in a load balanced deployment.
ServiceID *primitive.ObjectID `json:"serviceId"`
Error error `json:"error"`
ServiceID *primitive.ObjectID `json:"serviceId"`
Interruption bool `json:"interruptInUseConnections"`
Error error `json:"error"`
}
// PoolMonitor is a function that allows the user to gain access to events occurring in the pool

View File

@ -7,6 +7,7 @@
package csfle
import (
"errors"
"fmt"
"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
@ -23,7 +24,7 @@ func GetEncryptedStateCollectionName(efBSON bsoncore.Document, dataCollectionNam
fieldName := stateCollection + "Collection"
val, err := efBSON.LookupErr(fieldName)
if err != nil {
if err != bsoncore.ErrElementNotFound {
if !errors.Is(err, bsoncore.ErrElementNotFound) {
return "", err
}
// Return default name.

View File

@ -183,7 +183,7 @@ func selectLogSink(sink LogSink) (LogSink, *os.File, error) {
if path != "" {
logFile, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_RDWR, 0666)
if err != nil {
return nil, nil, fmt.Errorf("unable to open log file: %v", err)
return nil, nil, fmt.Errorf("unable to open log file: %w", err)
}
return NewIOSink(logFile), logFile, nil

View File

@ -8,6 +8,7 @@ package mongo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/primitive"
@ -71,7 +72,7 @@ func (bw *bulkWrite) execute(ctx context.Context) error {
bwErr.WriteErrors = append(bwErr.WriteErrors, batchErr.WriteErrors...)
commandErrorOccurred := err != nil && err != driver.ErrUnacknowledgedWrite
commandErrorOccurred := err != nil && !errors.Is(err, driver.ErrUnacknowledgedWrite)
writeErrorOccurred := len(batchErr.WriteErrors) > 0 || batchErr.WriteConcernError != nil
if !continueOnError && (commandErrorOccurred || writeErrorOccurred) {
if err != nil {
@ -108,8 +109,8 @@ func (bw *bulkWrite) runBatch(ctx context.Context, batch bulkWriteBatch) (BulkWr
case *InsertOneModel:
res, err := bw.runInsert(ctx, batch)
if err != nil {
writeErr, ok := err.(driver.WriteCommandError)
if !ok {
var writeErr driver.WriteCommandError
if !errors.As(err, &writeErr) {
return BulkWriteResult{}, batchErr, err
}
writeErrors = writeErr.WriteErrors
@ -120,8 +121,8 @@ func (bw *bulkWrite) runBatch(ctx context.Context, batch bulkWriteBatch) (BulkWr
case *DeleteOneModel, *DeleteManyModel:
res, err := bw.runDelete(ctx, batch)
if err != nil {
writeErr, ok := err.(driver.WriteCommandError)
if !ok {
var writeErr driver.WriteCommandError
if !errors.As(err, &writeErr) {
return BulkWriteResult{}, batchErr, err
}
writeErrors = writeErr.WriteErrors
@ -132,8 +133,8 @@ func (bw *bulkWrite) runBatch(ctx context.Context, batch bulkWriteBatch) (BulkWr
case *ReplaceOneModel, *UpdateOneModel, *UpdateManyModel:
res, err := bw.runUpdate(ctx, batch)
if err != nil {
writeErr, ok := err.(driver.WriteCommandError)
if !ok {
var writeErr driver.WriteCommandError
if !errors.As(err, &writeErr) {
return BulkWriteResult{}, batchErr, err
}
writeErrors = writeErr.WriteErrors
@ -170,7 +171,7 @@ func (bw *bulkWrite) runInsert(ctx context.Context, batch bulkWriteBatch) (opera
if err != nil {
return operation.InsertResult{}, err
}
doc, _, err = ensureID(doc, primitive.NewObjectID(), bw.collection.bsonOpts, bw.collection.registry)
doc, _, err = ensureID(doc, primitive.NilObjectID, bw.collection.bsonOpts, bw.collection.registry)
if err != nil {
return operation.InsertResult{}, err
}

View File

@ -689,8 +689,8 @@ func (cs *ChangeStream) loopNext(ctx context.Context, nonBlocking bool) {
}
func (cs *ChangeStream) isResumableError() bool {
commandErr, ok := cs.err.(CommandError)
if !ok || commandErr.HasErrorLabel(networkErrorLabel) {
var commandErr CommandError
if !errors.As(cs.err, &commandErr) || commandErr.HasErrorLabel(networkErrorLabel) {
// All non-server errors or network errors are resumable.
return true
}

View File

@ -555,7 +555,7 @@ func (c *Client) newMongoCrypt(opts *options.AutoEncryptionOptions) (*mongocrypt
kmsProviders, err := marshal(opts.KmsProviders, c.bsonOpts, c.registry)
if err != nil {
return nil, fmt.Errorf("error creating KMS providers document: %v", err)
return nil, fmt.Errorf("error creating KMS providers document: %w", err)
}
// Set the crypt_shared library override path from the "cryptSharedLibPath" extra option if one

View File

@ -46,7 +46,7 @@ func NewClientEncryption(keyVaultClient *Client, opts ...*options.ClientEncrypti
kmsProviders, err := marshal(ceo.KmsProviders, nil, nil)
if err != nil {
return nil, fmt.Errorf("error creating KMS providers map: %v", err)
return nil, fmt.Errorf("error creating KMS providers map: %w", err)
}
mc, err := mongocrypt.NewMongoCrypt(mcopts.MongoCrypt().

View File

@ -256,7 +256,7 @@ func (coll *Collection) insert(ctx context.Context, documents []interface{},
if err != nil {
return nil, err
}
bsoncoreDoc, id, err := ensureID(bsoncoreDoc, primitive.NewObjectID(), coll.bsonOpts, coll.registry)
bsoncoreDoc, id, err := ensureID(bsoncoreDoc, primitive.NilObjectID, coll.bsonOpts, coll.registry)
if err != nil {
return nil, err
}
@ -313,8 +313,8 @@ func (coll *Collection) insert(ctx context.Context, documents []interface{},
op = op.Retry(retry)
err = op.Execute(ctx)
wce, ok := err.(driver.WriteCommandError)
if !ok {
var wce driver.WriteCommandError
if !errors.As(err, &wce) {
return result, err
}
@ -388,8 +388,8 @@ func (coll *Collection) InsertMany(ctx context.Context, documents []interface{},
}
imResult := &InsertManyResult{InsertedIDs: result}
writeException, ok := err.(WriteException)
if !ok {
var writeException WriteException
if !errors.As(err, &writeException) {
return imResult, err
}
@ -1806,7 +1806,7 @@ func (coll *Collection) Drop(ctx context.Context) error {
func (coll *Collection) dropEncryptedCollection(ctx context.Context, ef interface{}) error {
efBSON, err := marshal(ef, coll.bsonOpts, coll.registry)
if err != nil {
return fmt.Errorf("error transforming document: %v", err)
return fmt.Errorf("error transforming document: %w", err)
}
// Drop the two encryption-related, associated collections: `escCollection` and `ecocCollection`.

View File

@ -160,13 +160,13 @@ func (c *Cursor) next(ctx context.Context, nonBlocking bool) bool {
ctx = context.Background()
}
doc, err := c.batch.Next()
switch err {
case nil:
switch {
case err == nil:
// Consume the next document in the current batch.
c.batchLength--
c.Current = bson.Raw(doc)
return true
case io.EOF: // Need to do a getMore
case errors.Is(err, io.EOF): // Need to do a getMore
default:
c.err = err
return false
@ -204,12 +204,12 @@ func (c *Cursor) next(ctx context.Context, nonBlocking bool) bool {
c.batch = c.bc.Batch()
c.batchLength = c.batch.DocumentCount()
doc, err = c.batch.Next()
switch err {
case nil:
switch {
case err == nil:
c.batchLength--
c.Current = bson.Raw(doc)
return true
case io.EOF: // Empty batch so we continue
case errors.Is(err, io.EOF): // Empty batch so we continue
default:
c.err = err
return false

View File

@ -566,7 +566,7 @@ func (db *Database) getEncryptedFieldsFromServer(ctx context.Context, collection
}
collSpec := collSpecs[0]
rawValue, err := collSpec.Options.LookupErr("encryptedFields")
if err == bsoncore.ErrElementNotFound {
if errors.Is(err, bsoncore.ErrElementNotFound) {
return nil, nil
} else if err != nil {
return nil, err
@ -602,7 +602,7 @@ func (db *Database) getEncryptedFieldsFromMap(collectionName string) interface{}
func (db *Database) createCollectionWithEncryptedFields(ctx context.Context, name string, ef interface{}, opts ...*options.CreateCollectionOptions) error {
efBSON, err := marshal(ef, db.bsonOpts, db.registry)
if err != nil {
return fmt.Errorf("error transforming document: %v", err)
return fmt.Errorf("error transforming document: %w", err)
}
// Check the wire version to ensure server is 7.0.0 or newer.
@ -662,7 +662,7 @@ func (db *Database) createCollectionWithEncryptedFields(ctx context.Context, nam
// Create an index on the __safeContent__ field in the collection @collectionName.
if _, err := db.Collection(name).Indexes().CreateOne(ctx, IndexModel{Keys: bson.D{{"__safeContent__", 1}}}); err != nil {
return fmt.Errorf("error creating safeContent index: %v", err)
return fmt.Errorf("error creating safeContent index: %w", err)
}
return nil

View File

@ -182,7 +182,17 @@ func (writeServerSelector) SelectServer(t Topology, candidates []Server) ([]Serv
case Single, LoadBalanced:
return candidates, nil
default:
result := []Server{}
// Determine the capacity of the results slice.
selected := 0
for _, candidate := range candidates {
switch candidate.Kind {
case Mongos, RSPrimary, Standalone:
selected++
}
}
// Append candidates to the results slice.
result := make([]Server, 0, selected)
for _, candidate := range candidates {
switch candidate.Kind {
case Mongos, RSPrimary, Standalone:

View File

@ -52,7 +52,7 @@ func replaceErrors(err error) error {
return nil
}
if err == topology.ErrTopologyClosed {
if errors.Is(err, topology.ErrTopologyClosed) {
return ErrClientDisconnected
}
if de, ok := err.(driver.Error); ok {
@ -630,7 +630,7 @@ const (
// WriteConcernError will be returned over WriteErrors if both are present.
func processWriteError(err error) (returnResult, error) {
switch {
case err == driver.ErrUnacknowledgedWrite:
case errors.Is(err, driver.ErrUnacknowledgedWrite):
return rrAll, ErrUnacknowledgedWrite
case err != nil:
switch tt := err.(type) {

View File

@ -429,7 +429,7 @@ func (b *Bucket) openDownloadStream(filter interface{}, opts ...*options.FindOpt
// in the File type. After parsing it, use RawValue.Unmarshal to ensure File.ID is set to the appropriate value.
var foundFile File
if err = cursor.Decode(&foundFile); err != nil {
return nil, fmt.Errorf("error decoding files collection document: %v", err)
return nil, fmt.Errorf("error decoding files collection document: %w", err)
}
if foundFile.Length == 0 {
@ -594,7 +594,7 @@ func (b *Bucket) createIndexes(ctx context.Context) error {
docRes := cloned.FindOne(ctx, bson.D{}, options.FindOne().SetProjection(bson.D{{"_id", 1}}))
_, err = docRes.Raw()
if err != mongo.ErrNoDocuments {
if !errors.Is(err, mongo.ErrNoDocuments) {
// nil, or error that occurred during the FindOne operation
return err
}

View File

@ -160,7 +160,7 @@ func (ds *DownloadStream) Read(p []byte) (int, error) {
// Buffer is empty and can load in data from new chunk.
err = ds.fillBuffer(ctx)
if err != nil {
if err == errNoMoreChunks {
if errors.Is(err, errNoMoreChunks) {
if bytesCopied == 0 {
ds.done = true
return 0, io.EOF
@ -203,7 +203,7 @@ func (ds *DownloadStream) Skip(skip int64) (int64, error) {
// Buffer is empty and can load in data from new chunk.
err = ds.fillBuffer(ctx)
if err != nil {
if err == errNoMoreChunks {
if errors.Is(err, errNoMoreChunks) {
return skipped, nil
}
return skipped, err

View File

@ -177,8 +177,11 @@ func marshal(
}
// ensureID inserts the given ObjectID as an element named "_id" at the
// beginning of the given BSON document if there is not an "_id" already. If
// there is already an element named "_id", the document is not modified. It
// beginning of the given BSON document if there is not an "_id" already.
// If the given ObjectID is primitive.NilObjectID, a new object ID will be
// generated with time.Now().
//
// If there is already an element named "_id", the document is not modified. It
// returns the resulting document and the decoded Go value of the "_id" element.
func ensureID(
doc bsoncore.Document,
@ -219,6 +222,9 @@ func ensureID(
const extraSpace = 17
doc = make(bsoncore.Document, 0, len(olddoc)+extraSpace)
_, doc = bsoncore.ReserveLength(doc)
if oid.IsZero() {
oid = primitive.NewObjectID()
}
doc = bsoncore.AppendObjectIDElement(doc, "_id", oid)
// Remove and re-write the BSON document length header.

View File

@ -237,7 +237,6 @@ type ClientOptions struct {
ZstdLevel *int
err error
uri string
cs *connstring.ConnString
// AuthenticateToAnything skips server type checks when deciding if authentication is possible.
@ -338,7 +337,10 @@ func (c *ClientOptions) validate() error {
// GetURI returns the original URI used to configure the ClientOptions instance. If ApplyURI was not called during
// construction, this returns "".
func (c *ClientOptions) GetURI() string {
return c.uri
if c.cs == nil {
return ""
}
return c.cs.Original
}
// ApplyURI parses the given URI and sets options accordingly. The URI can contain host names, IPv4/IPv6 literals, or
@ -360,13 +362,12 @@ func (c *ClientOptions) ApplyURI(uri string) *ClientOptions {
return c
}
c.uri = uri
cs, err := connstring.ParseAndValidate(uri)
if err != nil {
c.err = err
return c
}
c.cs = &cs
c.cs = cs
if cs.AppName != "" {
c.AppName = &cs.AppName
@ -1134,9 +1135,6 @@ func MergeClientOptions(opts ...*ClientOptions) *ClientOptions {
if opt.err != nil {
c.err = opt.err
}
if opt.uri != "" {
c.uri = opt.uri
}
if opt.cs != nil {
c.cs = opt.cs
}
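A quick hedged illustration of the GetURI behavior described above (the URI is a placeholder):

	opt := options.Client().ApplyURI("mongodb://localhost:27017/?appName=example")
	fmt.Println(opt.GetURI())                    // prints the URI passed to ApplyURI
	fmt.Println(options.Client().GetURI() == "") // true: without ApplyURI, GetURI returns ""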

View File

@ -8,4 +8,4 @@
package version // import "go.mongodb.org/mongo-driver/version"
// Driver is the current version of the driver.
var Driver = "v1.13.1"
var Driver = "v1.14.0"

View File

@ -102,7 +102,7 @@ func (sc *saslConversation) Finish(ctx context.Context, cfg *Config, firstRespon
var saslResp saslResponse
err := bson.Unmarshal(firstResponse, &saslResp)
if err != nil {
fullErr := fmt.Errorf("unmarshal error: %v", err)
fullErr := fmt.Errorf("unmarshal error: %w", err)
return newError(fullErr, sc.mechanism)
}
@ -146,7 +146,7 @@ func (sc *saslConversation) Finish(ctx context.Context, cfg *Config, firstRespon
err = bson.Unmarshal(rdr, &saslResp)
if err != nil {
fullErr := fmt.Errorf("unmarshal error: %v", err)
fullErr := fmt.Errorf("unmarshal error: %w", err)
return newError(fullErr, sc.mechanism)
}
}

View File

@ -14,7 +14,6 @@ package auth
import (
"context"
"fmt"
"github.com/xdg-go/scram"
"github.com/xdg-go/stringprep"
@ -53,7 +52,7 @@ func newScramSHA1Authenticator(cred *Cred) (Authenticator, error) {
func newScramSHA256Authenticator(cred *Cred) (Authenticator, error) {
passprep, err := stringprep.SASLprep.Prepare(cred.Password)
if err != nil {
return nil, newAuthError(fmt.Sprintf("error SASLprepping password '%s'", cred.Password), err)
return nil, newAuthError("error SASLprepping password", err)
}
client, err := scram.SHA256.NewClientUnprepped(cred.Username, passprep, "")
if err != nil {

View File

@ -79,7 +79,7 @@ type CursorResponse struct {
func NewCursorResponse(info ResponseInfo) (CursorResponse, error) {
response := info.ServerResponse
cur, err := response.LookupErr("cursor")
if err == bsoncore.ErrElementNotFound {
if errors.Is(err, bsoncore.ErrElementNotFound) {
return CursorResponse{}, ErrNoCursor
}
if err != nil {
@ -142,7 +142,7 @@ func NewCursorResponse(info ResponseInfo) (CursorResponse, error) {
return CursorResponse{}, fmt.Errorf("expected Connection used to establish a cursor to implement PinnedConnection, but got %T", info.Connection)
}
if err := refConn.PinToCursor(); err != nil {
return CursorResponse{}, fmt.Errorf("error incrementing connection reference count when creating a cursor: %v", err)
return CursorResponse{}, fmt.Errorf("error incrementing connection reference count when creating a cursor: %w", err)
}
curresp.Connection = refConn
}

File diff suppressed because it is too large

View File

@ -9,6 +9,7 @@ package driver
import (
"context"
"crypto/tls"
"errors"
"fmt"
"io"
"strings"
@ -399,7 +400,7 @@ func (c *crypt) decryptKey(kmsCtx *mongocrypt.KmsContext) error {
res := make([]byte, bytesNeeded)
bytesRead, err := conn.Read(res)
if err != nil && err != io.EOF {
if err != nil && !errors.Is(err, io.EOF) {
return err
}

View File

@ -104,8 +104,8 @@ func (r *Resolver) fetchSeedlistFromSRV(host string, srvName string, stopOnErr b
}
func validateSRVResult(recordFromSRV, inputHostName string) error {
separatedInputDomain := strings.Split(inputHostName, ".")
separatedRecord := strings.Split(recordFromSRV, ".")
separatedInputDomain := strings.Split(strings.ToLower(inputHostName), ".")
separatedRecord := strings.Split(strings.ToLower(recordFromSRV), ".")
if len(separatedRecord) < 2 {
return errors.New("DNS name must contain at least 2 labels")
}

Some files were not shown because too many files have changed in this diff